
combined all tests into one single Archiver folder PR

bigtedde 2 years ago
parent
commit
2cf784d5c6

+ 86 - 1
conftest.py

@@ -2,12 +2,14 @@ import os
 
 import pytest
 
-# needed to get pretty assertion failures in unit tests:
+from borg.testsuite.archiver import exec_cmd
+
 if hasattr(pytest, "register_assert_rewrite"):
     pytest.register_assert_rewrite("borg.testsuite")
 
 
 import borg.cache  # noqa: E402
+from borg.archiver import Archiver
 from borg.logger import setup_logging  # noqa: E402
 
 # Ensure that the loggers exist for all tests
@@ -73,3 +75,86 @@ class DefaultPatches:
 @pytest.fixture(autouse=True)
 def default_patches(request):
     return DefaultPatches(request)
+
+
+@pytest.fixture()
+def set_env_variables():
+    os.environ["BORG_CHECK_I_KNOW_WHAT_I_AM_DOING"] = "YES"
+    os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
+    os.environ["BORG_PASSPHRASE"] = "waytooeasyonlyfortests"
+    os.environ["BORG_SELFTEST"] = "disabled"
+
+
+class ArchiverSetup:
+    EXE: str = None  # python source based
+    FORK_DEFAULT = False
+    prefix = ""
+    BORG_EXES = []
+
+    def __init__(self):
+        self.archiver = None
+        self.tmpdir = ""
+        self.repository_path = ""
+        self.repository_location = ""
+        self.input_path = ""
+        self.output_path = ""
+        self.keys_path = ""
+        self.cache_path = ""
+        self.exclude_file_path = ""
+        self.patterns_file_path = ""
+        self._old_wd = ""
+
+
+@pytest.fixture()
+def archiver(tmp_path, set_env_variables):
+    archiver = ArchiverSetup()
+    archiver.archiver = Archiver() if not archiver.FORK_DEFAULT else None
+    archiver.tmpdir = tmp_path
+    archiver.repository_path = os.fspath(tmp_path / "repository")
+    archiver.repository_location = archiver.prefix + archiver.repository_path
+    archiver.input_path = os.fspath(tmp_path / "input")
+    archiver.output_path = os.fspath(tmp_path / "output")
+    archiver.keys_path = os.fspath(tmp_path / "keys")
+    archiver.cache_path = os.fspath(tmp_path / "cache")
+    archiver.exclude_file_path = os.fspath(tmp_path / "excludes")
+    archiver.patterns_file_path = os.fspath(tmp_path / "patterns")
+    os.environ["BORG_KEYS_DIR"] = archiver.keys_path
+    os.environ["BORG_CACHE_DIR"] = archiver.cache_path
+    os.mkdir(archiver.input_path)
+    os.chmod(archiver.input_path, 0o777)  # avoid troubles with fakeroot / FUSE
+    os.mkdir(archiver.output_path)
+    os.mkdir(archiver.keys_path)
+    os.mkdir(archiver.cache_path)
+    with open(archiver.exclude_file_path, "wb") as fd:
+        fd.write(b"input/file2\n# A comment line, then a blank line\n\n")
+    with open(archiver.patterns_file_path, "wb") as fd:
+        fd.write(b"+input/file_important\n- input/file*\n# A comment line, then a blank line\n\n")
+    archiver._old_wd = os.getcwd()
+    os.chdir(archiver.tmpdir)
+    yield archiver
+    os.chdir(archiver._old_wd)
+
+
+@pytest.fixture()
+def remote_archiver(archiver):
+    archiver.prefix = "ssh://__testsuite__"
+    archiver.repository_location = archiver.prefix + archiver.repository_path
+    return archiver
+
+
+@pytest.fixture()
+def check_binary_availability(archiver):
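+    # check if the binary "borg.exe" is available (for local testing, a symlink to virtualenv/bin/borg should do)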
+    try:
+        exec_cmd("help", exe="borg.exe", fork=True)
+        archiver.BORG_EXES = ["python", "binary"]
+    except FileNotFoundError:
+        archiver.BORG_EXES = ["python"]
+
+
+@pytest.fixture()
+def binary_archiver(archiver, check_binary_availability):
+    if "binary" not in archiver.BORG_EXES:
+        pytest.skip("No borg.exe binary available")
+    archiver.EXE = "borg.exe"
+    archiver.FORK_DEFAULT = True
+    yield archiver
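
Note: a minimal sketch of how a test would consume these fixtures after this change (the test below is illustrative only, not part of this PR):

from borg.testsuite.archiver import cmd, RK_ENCRYPTION

def test_roundtrip(archiver):  # hypothetical example
    # cwd is archiver.tmpdir, so "input" refers to the fixture-created input dir
    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
    cmd(archiver, f"--repo={archiver.repository_location}", "create", "test", "input")
    assert "test" in cmd(archiver, f"--repo={archiver.repository_location}", "rlist")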

+ 409 - 281
src/borg/testsuite/archiver/__init__.py

@@ -1,13 +1,14 @@
 import errno
+import filecmp
 import io
 import os
-import shutil
+import re
 import stat
 import subprocess
 import sys
 import tempfile
 import time
 from configparser import ConfigParser
+from contextlib import contextmanager
 from datetime import datetime
 from io import BytesIO, StringIO
 
@@ -18,17 +19,19 @@ from ...archive import Archive
 from ...archiver import Archiver, PURE_PYTHON_MSGPACK_WARNING
 from ...cache import Cache
 from ...constants import *  # NOQA
-from ...helpers import Location
+from ...helpers import Location, umount
 from ...helpers import EXIT_SUCCESS
 from ...helpers import bin_to_hex
 from ...logger import flush_logging
 from ...manifest import Manifest
+from ...platform import get_flags
 from ...remote import RemoteRepository
 from ...repository import Repository
-from .. import has_lchflags
-from .. import BaseTestCase, changedir, environment_variable
+from .. import has_lchflags, is_utime_fully_supported, have_fuse_mtime_ns, st_mtime_ns_round, no_selinux
+from .. import changedir
 from .. import are_symlinks_supported, are_hardlinks_supported, are_fifos_supported
 from ..platform import is_win32
+from ...xattr import get_all
 
 RK_ENCRYPTION = "--encryption=repokey-aes-ocb"
 KF_ENCRYPTION = "--encryption=keyfile-chacha20-poly1305"
@@ -85,295 +88,420 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b"", binary_outpu
             try:
                 ret = archiver.run(args)  # calls setup_logging internally
             finally:
-                flush_logging()  # usually done via atexit, but we do not exit here
+                flush_logging()  # usually done at exit, but we do not exit here
             output_text.flush()
             return ret, output.getvalue() if binary_output else output.getvalue().decode()
         finally:
             sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr
 
 
-# check if the binary "borg.exe" is available (for local testing a symlink to virtualenv/bin/borg should do)
-try:
-    exec_cmd("help", exe="borg.exe", fork=True)
-    BORG_EXES = ["python", "binary"]
-except FileNotFoundError:
-    BORG_EXES = ["python"]
+def checkts(ts):
+    # check if the timestamp is in the expected format
+    assert datetime.strptime(ts, ISO_FORMAT + "%z")  # must not raise
 
 
-@pytest.fixture(params=BORG_EXES)
-def cmd(request):
-    if request.param == "python":
-        exe = None
-    elif request.param == "binary":
-        exe = "borg.exe"
+def cmd(archiver, *args, **kw):
+    exit_code = kw.pop("exit_code", 0)
+    fork = kw.pop("fork", None)
+    binary_output = kw.get("binary_output", False)
+    if fork is None:
+        fork = archiver.FORK_DEFAULT
+    ret, output = exec_cmd(*args, archiver=archiver.archiver, fork=fork, exe=archiver.EXE, **kw)
+    if ret != exit_code:
+        print(output)
+    assert ret == exit_code
+    # if tests are run with the pure-python msgpack, there will be warnings about
+    # this in the output, which would make a lot of tests fail.
+    pp_msg = PURE_PYTHON_MSGPACK_WARNING.encode() if binary_output else PURE_PYTHON_MSGPACK_WARNING
+    empty = b"" if binary_output else ""
+    output = empty.join(line for line in output.splitlines(keepends=True) if pp_msg not in line)
+    return output
+
+
+def create_src_archive(archiver, name, ts=None):
+    repo_location, source_dir = archiver.repository_location, src_dir
+    if ts:
+        cmd(archiver, f"--repo={repo_location}", "create", "--compression=lz4", f"--timestamp={ts}", name, source_dir)
     else:
-        raise ValueError("param must be 'python' or 'binary'")
+        cmd(archiver, f"--repo={repo_location}", "create", "--compression=lz4", name, source_dir)
 
-    def exec_fn(*args, **kw):
-        return exec_cmd(*args, exe=exe, fork=True, **kw)
 
-    return exec_fn
+def open_archive(repo_path, name):
+    repository = Repository(repo_path, exclusive=True)
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        archive = Archive(manifest, name)
+    return archive, repository
 
 
-def checkts(ts):
-    # check if the timestamp is in the expected format
-    assert datetime.strptime(ts, ISO_FORMAT + "%z")  # must not raise
+def open_repository(archiver):
+    if archiver.prefix == "":
+        return Repository(archiver.repository_path, exclusive=True)
+    elif archiver.prefix == "ssh://__testsuite__":
+        return RemoteRepository(Location(archiver.repository_location))
+    else:
+        print(f"Archiver prefix '{archiver.prefix}' is not a valid prefix! Cannot open repo.")
+        return
+
+
+def create_regular_file(input_path, name, size=0, contents=None):
+    assert not (size != 0 and contents and len(contents) != size), "size and contents do not match"
+    filename = os.path.join(input_path, name)
+    if not os.path.exists(os.path.dirname(filename)):
+        os.makedirs(os.path.dirname(filename))
+    with open(filename, "wb") as fd:
+        if contents is None:
+            contents = b"X" * size
+        fd.write(contents)
+
+
+def create_test_files(input_path, create_hardlinks=True):
+    """Create a minimal test case including all supported file types"""
+    # File
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "flagfile", size=1024)
+    # Directory
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
+    # File mode
+    os.chmod("input/file1", 0o4755)
+    # Hard link
+    if are_hardlinks_supported() and create_hardlinks:
+        os.link(os.path.join(input_path, "file1"), os.path.join(input_path, "hardlink"))
+    # Symlink
+    if are_symlinks_supported():
+        os.symlink("somewhere", os.path.join(input_path, "link1"))
+    create_regular_file(input_path, "fusexattr", size=1)
+    if not xattr.XATTR_FAKEROOT and xattr.is_enabled(input_path):
+        fn = os.fsencode(os.path.join(input_path, "fusexattr"))
+        # ironically, due to the way how fakeroot works, comparing FUSE file xattrs to orig file xattrs
+        # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False.
+        # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file
+        # into "fakeroot space". Because the xattrs exposed by borgfs are these of an underlying file
+        # (from fakeroots point of view) they are invisible to the test process inside the fakeroot.
+        xattr.setxattr(fn, b"user.foo", b"bar")
+        xattr.setxattr(fn, b"user.empty", b"")
+        # XXX this always fails for me
+        # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot
+        # same for newer ubuntu and centos.
+        # if this is supported just on specific platform, platform should be checked first,
+        # so that the test setup for all tests using it does not fail here always for others.
+    # FIFO node
+    if are_fifos_supported():
+        os.mkfifo(os.path.join(input_path, "fifo1"))
+    if has_lchflags:
+        platform.set_flags(os.path.join(input_path, "flagfile"), stat.UF_NODUMP)
+
+    if is_win32:
+        have_root = False
+    else:
+        try:
+            # Block device
+            os.mknod("input/bdev", 0o600 | stat.S_IFBLK, os.makedev(10, 20))
+            # Char device
+            os.mknod("input/cdev", 0o600 | stat.S_IFCHR, os.makedev(30, 40))
+            # File owner
+            os.chown("input/file1", 100, 200)  # raises OSError invalid argument on cygwin
+            # File mode
+            os.chmod("input/dir2", 0o555)  # if we take away write perms, we need root to remove contents
+            have_root = True  # we have (fake)root
+        except PermissionError:
+            have_root = False
+        except OSError as e:
+            # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem.
+            if e.errno not in (errno.EINVAL, errno.ENOSYS):
+                raise
+            have_root = False
+    time.sleep(1)  # "empty" must have newer timestamp than other files
+    create_regular_file(input_path, "empty", size=0)
+    return have_root
 
 
-class ArchiverTestCaseBase(BaseTestCase):
-    EXE: str = None  # python source based
-    FORK_DEFAULT = False
-    prefix = ""
-
-    def setUp(self):
-        os.environ["BORG_CHECK_I_KNOW_WHAT_I_AM_DOING"] = "YES"
-        os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
-        os.environ["BORG_PASSPHRASE"] = "waytooeasyonlyfortests"
-        os.environ["BORG_SELFTEST"] = "disabled"
-        self.archiver = not self.FORK_DEFAULT and Archiver() or None
-        self.tmpdir = tempfile.mkdtemp()
-        self.repository_path = os.path.join(self.tmpdir, "repository")
-        self.repository_location = self.prefix + self.repository_path
-        self.input_path = os.path.join(self.tmpdir, "input")
-        self.output_path = os.path.join(self.tmpdir, "output")
-        self.keys_path = os.path.join(self.tmpdir, "keys")
-        self.cache_path = os.path.join(self.tmpdir, "cache")
-        self.exclude_file_path = os.path.join(self.tmpdir, "excludes")
-        self.patterns_file_path = os.path.join(self.tmpdir, "patterns")
-        os.environ["BORG_KEYS_DIR"] = self.keys_path
-        os.environ["BORG_CACHE_DIR"] = self.cache_path
-        os.mkdir(self.input_path)
-        os.chmod(self.input_path, 0o777)  # avoid troubles with fakeroot / FUSE
-        os.mkdir(self.output_path)
-        os.mkdir(self.keys_path)
-        os.mkdir(self.cache_path)
-        with open(self.exclude_file_path, "wb") as fd:
-            fd.write(b"input/file2\n# A comment line, then a blank line\n\n")
-        with open(self.patterns_file_path, "wb") as fd:
-            fd.write(b"+input/file_important\n- input/file*\n# A comment line, then a blank line\n\n")
-        self._old_wd = os.getcwd()
-        os.chdir(self.tmpdir)
-
-    def tearDown(self):
-        os.chdir(self._old_wd)
-        # note: ignore_errors=True as workaround for issue #862
-        shutil.rmtree(self.tmpdir, ignore_errors=True)
-
-    def cmd(self, *args, **kw):
-        exit_code = kw.pop("exit_code", 0)
-        fork = kw.pop("fork", None)
-        binary_output = kw.get("binary_output", False)
-        if fork is None:
-            fork = self.FORK_DEFAULT
-        ret, output = exec_cmd(*args, fork=fork, exe=self.EXE, archiver=self.archiver, **kw)
-        if ret != exit_code:
-            print(output)
-        self.assert_equal(ret, exit_code)
-        # if tests are run with the pure-python msgpack, there will be warnings about
-        # this in the output, which would make a lot of tests fail.
-        pp_msg = PURE_PYTHON_MSGPACK_WARNING.encode() if binary_output else PURE_PYTHON_MSGPACK_WARNING
-        empty = b"" if binary_output else ""
-        output = empty.join(line for line in output.splitlines(keepends=True) if pp_msg not in line)
-        return output
-
-    def create_src_archive(self, name, ts=None):
-        if ts:
-            self.cmd(
-                f"--repo={self.repository_location}", "create", "--compression=lz4", f"--timestamp={ts}", name, src_dir
-            )
-        else:
-            self.cmd(f"--repo={self.repository_location}", "create", "--compression=lz4", name, src_dir)
-
-    def open_archive(self, name):
-        repository = Repository(self.repository_path, exclusive=True)
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            archive = Archive(manifest, name)
-        return archive, repository
-
-    def open_repository(self):
-        return Repository(self.repository_path, exclusive=True)
-
-    def create_regular_file(self, name, size=0, contents=None):
-        assert not (size != 0 and contents and len(contents) != size), "size and contents do not match"
-        filename = os.path.join(self.input_path, name)
-        if not os.path.exists(os.path.dirname(filename)):
-            os.makedirs(os.path.dirname(filename))
-        with open(filename, "wb") as fd:
-            if contents is None:
-                contents = b"X" * size
-            fd.write(contents)
-
-    def create_test_files(self, create_hardlinks=True):
-        """Create a minimal test case including all supported file types"""
-        # File
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("flagfile", size=1024)
-        # Directory
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        # File mode
-        os.chmod("input/file1", 0o4755)
-        # Hard link
-        if are_hardlinks_supported() and create_hardlinks:
-            os.link(os.path.join(self.input_path, "file1"), os.path.join(self.input_path, "hardlink"))
-        # Symlink
-        if are_symlinks_supported():
-            os.symlink("somewhere", os.path.join(self.input_path, "link1"))
-        self.create_regular_file("fusexattr", size=1)
-        if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path):
-            fn = os.fsencode(os.path.join(self.input_path, "fusexattr"))
-            # ironically, due to the way how fakeroot works, comparing FUSE file xattrs to orig file xattrs
-            # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False.
-            # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file
-            # into "fakeroot space". Because the xattrs exposed by borgfs are these of an underlying file
-            # (from fakeroots point of view) they are invisible to the test process inside the fakeroot.
-            xattr.setxattr(fn, b"user.foo", b"bar")
-            xattr.setxattr(fn, b"user.empty", b"")
-            # XXX this always fails for me
-            # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot
-            # same for newer ubuntu and centos.
-            # if this is supported just on specific platform, platform should be checked first,
-            # so that the test setup for all tests using it does not fail here always for others.
-            # xattr.setxattr(os.path.join(self.input_path, 'link1'), b'user.foo_symlink', b'bar_symlink', follow_symlinks=False)
-        # FIFO node
-        if are_fifos_supported():
-            os.mkfifo(os.path.join(self.input_path, "fifo1"))
-        if has_lchflags:
-            platform.set_flags(os.path.join(self.input_path, "flagfile"), stat.UF_NODUMP)
-
-        if is_win32:
-            have_root = False
-        else:
+def _extract_repository_id(repo_path):
+    with Repository(repo_path) as repository:
+        return repository.id
+
+
+def _set_repository_id(repo_path, id):
+    config = ConfigParser(interpolation=None)
+    config.read(os.path.join(repo_path, "config"))
+    config.set("repository", "id", bin_to_hex(id))
+    with open(os.path.join(repo_path, "config"), "w") as fd:
+        config.write(fd)
+    with Repository(repo_path) as repository:
+        return repository.id
+
+
+def _extract_hardlinks_setup(archiver):
+    os.mkdir(os.path.join(archiver.input_path, "dir1"))
+    os.mkdir(os.path.join(archiver.input_path, "dir1/subdir"))
+
+    archiver.create_regular_file("source", contents=b"123456")
+    os.link(os.path.join(archiver.input_path, "source"), os.path.join(archiver.input_path, "abba"))
+    os.link(os.path.join(archiver.input_path, "source"), os.path.join(archiver.input_path, "dir1/hardlink"))
+    os.link(os.path.join(archiver.input_path, "source"), os.path.join(archiver.input_path, "dir1/subdir/hardlink"))
+
+    create_regular_file(archiver.input_path, "dir1/source2")
+    os.link(os.path.join(archiver.input_path, "dir1/source2"), os.path.join(archiver.input_path, "dir1/aaaa"))
+
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={archiver.repository_location}", "create", "test", "input")
+
+
+def _create_test_caches(archiver):
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "cache1/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "cache2/%s" % CACHE_TAG_NAME, contents=b"invalid signature")
+    os.mkdir("input/cache3")
+    if are_hardlinks_supported():
+        os.link("input/cache1/%s" % CACHE_TAG_NAME, "input/cache3/%s" % CACHE_TAG_NAME)
+    else:
+        create_regular_file(
+            archiver.input_path, "cache3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff"
+        )
+
+
+def _assert_test_caches(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["cache2", "file1"]
+    assert sorted(os.listdir("output/input/cache2")) == [CACHE_TAG_NAME]
+
+
+def _create_test_tagged(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(archiver.input_path, "file1", size=1024 * 80)
+    create_regular_file(archiver.input_path, "tagged1/.NOBACKUP")
+    create_regular_file(archiver.input_path, "tagged2/00-NOBACKUP")
+    create_regular_file(archiver.input_path, "tagged3/.NOBACKUP/file2", size=1024)
+
+
+def _assert_test_tagged(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["file1"]
+
+
+def _create_test_keep_tagged(archiver):
+    input_path = archiver.input_path
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file0", size=1024)
+    create_regular_file(input_path, "tagged1/.NOBACKUP1")
+    create_regular_file(input_path, "tagged1/file1", size=1024)
+    create_regular_file(input_path, "tagged2/.NOBACKUP2/subfile1", size=1024)
+    create_regular_file(input_path, "tagged2/file2", size=1024)
+    create_regular_file(input_path, "tagged3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "tagged3/file3", size=1024)
+    create_regular_file(input_path, "taggedall/.NOBACKUP1")
+    create_regular_file(input_path, "taggedall/.NOBACKUP2/subfile1", size=1024)
+    create_regular_file(input_path, "taggedall/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "taggedall/file4", size=1024)
+
+
+def _assert_test_keep_tagged(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")), ["file0", "tagged1", "tagged2", "tagged3", "taggedall"]
+    assert os.listdir("output/input/tagged1"), [".NOBACKUP1"]
+    assert os.listdir("output/input/tagged2"), [".NOBACKUP2"]
+    assert os.listdir("output/input/tagged3"), [CACHE_TAG_NAME]
+    assert sorted(os.listdir("output/input/taggedall")), [".NOBACKUP1", ".NOBACKUP2", CACHE_TAG_NAME]
+
+
+def check_cache(archiver):
+    # First run a regular borg check
+    cmd(archiver, f"--repo={archiver.repository_location}", "check")
+    # Then check that the cache on disk matches exactly what's in the repo.
+    with open_repository(archiver) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest, sync=False) as cache:
+            original_chunks = cache.chunks
+        Cache.destroy(repository)
+        with Cache(repository, manifest) as cache:
+            correct_chunks = cache.chunks
+    assert original_chunks is not correct_chunks
+    seen = set()
+    for id, (refcount, size) in correct_chunks.iteritems():
+        o_refcount, o_size = original_chunks[id]
+        assert refcount == o_refcount
+        assert size == o_size
+        seen.add(id)
+    for id, (refcount, size) in original_chunks.iteritems():
+        assert id in seen
+
+
+@contextmanager
+def assert_creates_file(path):
+    assert not os.path.exists(path), f"{path} should not exist"
+    yield
+    assert os.path.exists(path), f"{path} should exist"
+
+
+def assert_dirs_equal(dir1, dir2, **kwargs):
+    diff = filecmp.dircmp(dir1, dir2)
+    _assert_dirs_equal_cmp(diff, **kwargs)
+
+
+def assert_line_exists(lines, expected_regexpr):
+    assert any(re.search(expected_regexpr, line) for line in lines), f"no match for {expected_regexpr} in {lines}"
+
+
+def _assert_dirs_equal_cmp(diff, ignore_flags=False, ignore_xattrs=False, ignore_ns=False):
+    assert diff.left_only == []
+    assert diff.right_only == []
+    assert diff.diff_files == []
+    assert diff.funny_files == []
+    for filename in diff.common:
+        path1 = os.path.join(diff.left, filename)
+        path2 = os.path.join(diff.right, filename)
+        s1 = os.stat(path1, follow_symlinks=False)
+        s2 = os.stat(path2, follow_symlinks=False)
+        # Assume path2 is on FUSE if st_dev is different
+        fuse = s1.st_dev != s2.st_dev
+        attrs = ["st_uid", "st_gid", "st_rdev"]
+        if not fuse or not os.path.isdir(path1):
+            # dir nlink is always 1 on our FUSE filesystem
+            attrs.append("st_nlink")
+        d1 = [filename] + [getattr(s1, a) for a in attrs]
+        d2 = [filename] + [getattr(s2, a) for a in attrs]
+        d1.insert(1, oct(s1.st_mode))
+        d2.insert(1, oct(s2.st_mode))
+        if not ignore_flags:
+            d1.append(get_flags(path1, s1))
+            d2.append(get_flags(path2, s2))
+        # ignore st_rdev if file is not a block/char device, fixes #203
+        if not stat.S_ISCHR(s1.st_mode) and not stat.S_ISBLK(s1.st_mode):
+            d1[4] = None
+        if not stat.S_ISCHR(s2.st_mode) and not stat.S_ISBLK(s2.st_mode):
+            d2[4] = None
+        # If utime isn't fully supported, borg can't set mtime.
+        # Therefore, we shouldn't test it in that case.
+        if is_utime_fully_supported():
+            # Older versions of llfuse do not support ns precision properly
+            if ignore_ns:
+                d1.append(int(s1.st_mtime_ns / 1e9))
+                d2.append(int(s2.st_mtime_ns / 1e9))
+            elif fuse and not have_fuse_mtime_ns:
+                d1.append(round(s1.st_mtime_ns, -4))
+                d2.append(round(s2.st_mtime_ns, -4))
+            else:
+                d1.append(round(s1.st_mtime_ns, st_mtime_ns_round))
+                d2.append(round(s2.st_mtime_ns, st_mtime_ns_round))
+        if not ignore_xattrs:
+            d1.append(no_selinux(get_all(path1, follow_symlinks=False)))
+            d2.append(no_selinux(get_all(path2, follow_symlinks=False)))
+        assert d1 == d2
+    for sub_diff in diff.subdirs.values():
+        _assert_dirs_equal_cmp(sub_diff, ignore_flags=ignore_flags, ignore_xattrs=ignore_xattrs, ignore_ns=ignore_ns)
+
+
+@contextmanager
+def test_extract_file(path):
+    assert not os.path.exists(path), f"{path} should not exist"
+    yield
+    assert os.path.exists(path), f"{path} should exist"
+
+
+@contextmanager
+def read_only(path):
+    """Some paths need to be made read-only for testing
+
+    If the tests are executed inside a fakeroot environment, the
+    changes from chmod won't affect the real permissions of that
+    folder. This issue is circumvented by temporarily disabling
+    fakeroot with `LD_PRELOAD=`.
+
+    Using chmod to remove write permissions is not enough if the
+    tests are running with root privileges. Instead, the folder is
+    rendered immutable with chattr or chflags, respectively.
+    """
+    if sys.platform.startswith("linux"):
+        cmd_immutable = 'chattr +i "%s"' % path
+        cmd_mutable = 'chattr -i "%s"' % path
+    elif sys.platform.startswith(("darwin", "freebsd", "netbsd", "openbsd")):
+        cmd_immutable = 'chflags uchg "%s"' % path
+        cmd_mutable = 'chflags nouchg "%s"' % path
+    elif sys.platform.startswith("sunos"):  # openindiana
+        cmd_immutable = 'chmod S+vimmutable "%s"' % path
+        cmd_mutable = 'chmod S-vimmutable "%s"' % path
+    else:
+        message = "Testing read-only repos is not supported on platform %s" % sys.platform
+        pytest.skip(message)
+    try:
+        os.system('LD_PRELOAD= chmod -R ugo-w "%s"' % path)
+        os.system(cmd_immutable)
+        yield
+    finally:
+        # Restore permissions to ensure clean-up doesn't fail
+        os.system(cmd_mutable)
+        os.system('LD_PRELOAD= chmod -R ugo+w "%s"' % path)
+
+
+def wait_for_mountstate(mountpoint, *, mounted, timeout=5):
+    """Wait until a path meets specified mount point status"""
+    timeout += time.time()
+    while timeout > time.time():
+        if os.path.ismount(mountpoint) == mounted:
+            return
+        time.sleep(0.1)
+    message = "Waiting for {} of {}".format("mount" if mounted else "umount", mountpoint)
+    raise TimeoutError(message)
+
+
+@contextmanager
+def fuse_mount(archiver, location, mountpoint=None, *options, fork=True, os_fork=False, **kwargs):
+    # For a successful mount, `fork = True` is required for
+    # the borg mount daemon to work properly or the tests
+    # will just freeze. Therefore, if argument `fork` is not
+    # specified, the default value is `True`, regardless of
+    # `FORK_DEFAULT`. However, leaving the possibility to run
+    # the command with `fork = False` is still necessary for
+    # testing for mount failures, for example attempting to
+    # mount a read-only repo.
+    #    `os_fork = True` is needed for testing (the absence of)
+    # a race condition of the Lock during lock migration when
+    # borg mount (local repo) is daemonizing (#4953). This is another
+    # example where we need `fork = False`, because the test case
+    # needs an OS fork, not a spawning of the fuse mount.
+    # `fork = False` is implied if `os_fork = True`.
+    if mountpoint is None:
+        mountpoint = tempfile.mkdtemp()
+    else:
+        os.mkdir(mountpoint)
+    args = [f"--repo={location}", "mount", mountpoint] + list(options)
+    if os_fork:
+        # Do not spawn, but actually (OS) fork.
+        if os.fork() == 0:
+            # The child process.
+            # Decouple from parent and fork again.
+            # Otherwise, it becomes a zombie and pretends to be alive.
+            os.setsid()
+            if os.fork() > 0:
+                os._exit(0)
+            # The grandchild process.
             try:
-                # Block device
-                os.mknod("input/bdev", 0o600 | stat.S_IFBLK, os.makedev(10, 20))
-                # Char device
-                os.mknod("input/cdev", 0o600 | stat.S_IFCHR, os.makedev(30, 40))
-                # File owner
-                os.chown("input/file1", 100, 200)  # raises OSError invalid argument on cygwin
-                # File mode
-                os.chmod("input/dir2", 0o555)  # if we take away write perms, we need root to remove contents
-                have_root = True  # we have (fake)root
-            except PermissionError:
-                have_root = False
-            except OSError as e:
-                # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem.
-                if e.errno not in (errno.EINVAL, errno.ENOSYS):
-                    raise
-                have_root = False
-        time.sleep(1)  # "empty" must have newer timestamp than other files
-        self.create_regular_file("empty", size=0)
-        return have_root
-
-    def _extract_repository_id(self, path):
-        with Repository(self.repository_path) as repository:
-            return repository.id
-
-    def _set_repository_id(self, path, id):
-        config = ConfigParser(interpolation=None)
-        config.read(os.path.join(path, "config"))
-        config.set("repository", "id", bin_to_hex(id))
-        with open(os.path.join(path, "config"), "w") as fd:
-            config.write(fd)
-        with Repository(self.repository_path) as repository:
-            return repository.id
-
-    def _extract_hardlinks_setup(self):
-        os.mkdir(os.path.join(self.input_path, "dir1"))
-        os.mkdir(os.path.join(self.input_path, "dir1/subdir"))
-
-        self.create_regular_file("source", contents=b"123456")
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "abba"))
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "dir1/hardlink"))
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "dir1/subdir/hardlink"))
-
-        self.create_regular_file("dir1/source2")
-        os.link(os.path.join(self.input_path, "dir1/source2"), os.path.join(self.input_path, "dir1/aaaa"))
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-    def _create_test_caches(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("cache1/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("cache2/%s" % CACHE_TAG_NAME, contents=b"invalid signature")
-        os.mkdir("input/cache3")
-        if are_hardlinks_supported():
-            os.link("input/cache1/%s" % CACHE_TAG_NAME, "input/cache3/%s" % CACHE_TAG_NAME)
-        else:
-            self.create_regular_file("cache3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-
-    def _assert_test_caches(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["cache2", "file1"])
-        self.assert_equal(sorted(os.listdir("output/input/cache2")), [CACHE_TAG_NAME])
-
-    def _create_test_tagged(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("tagged1/.NOBACKUP")
-        self.create_regular_file("tagged2/00-NOBACKUP")
-        self.create_regular_file("tagged3/.NOBACKUP/file2", size=1024)
-
-    def _assert_test_tagged(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1"])
-
-    def _create_test_keep_tagged(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file0", size=1024)
-        self.create_regular_file("tagged1/.NOBACKUP1")
-        self.create_regular_file("tagged1/file1", size=1024)
-        self.create_regular_file("tagged2/.NOBACKUP2/subfile1", size=1024)
-        self.create_regular_file("tagged2/file2", size=1024)
-        self.create_regular_file("tagged3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("tagged3/file3", size=1024)
-        self.create_regular_file("taggedall/.NOBACKUP1")
-        self.create_regular_file("taggedall/.NOBACKUP2/subfile1", size=1024)
-        self.create_regular_file("taggedall/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("taggedall/file4", size=1024)
-
-    def _assert_test_keep_tagged(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file0", "tagged1", "tagged2", "tagged3", "taggedall"])
-        self.assert_equal(os.listdir("output/input/tagged1"), [".NOBACKUP1"])
-        self.assert_equal(os.listdir("output/input/tagged2"), [".NOBACKUP2"])
-        self.assert_equal(os.listdir("output/input/tagged3"), [CACHE_TAG_NAME])
-        self.assert_equal(sorted(os.listdir("output/input/taggedall")), [".NOBACKUP1", ".NOBACKUP2", CACHE_TAG_NAME])
-
-    def check_cache(self):
-        # First run a regular borg check
-        self.cmd(f"--repo={self.repository_location}", "check")
-        # Then check that the cache on disk matches exactly what's in the repo.
-        with self.open_repository() as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest, sync=False) as cache:
-                original_chunks = cache.chunks
-            Cache.destroy(repository)
-            with Cache(repository, manifest) as cache:
-                correct_chunks = cache.chunks
-        assert original_chunks is not correct_chunks
-        seen = set()
-        for id, (refcount, size) in correct_chunks.iteritems():
-            o_refcount, o_size = original_chunks[id]
-            assert refcount == o_refcount
-            assert size == o_size
-            seen.add(id)
-        for id, (refcount, size) in original_chunks.iteritems():
-            assert id in seen
-
-
-class ArchiverTestCaseBinaryBase:
-    EXE = "borg.exe"
-    FORK_DEFAULT = True
-
-
-class RemoteArchiverTestCaseBase:
-    prefix = "ssh://__testsuite__"
-
-    def open_repository(self):
-        return RemoteRepository(Location(self.repository_location))
+                cmd(archiver, *args, fork=False, **kwargs)  # borg mount not spawning.
+            finally:
+                # This should never be reached, since it daemonizes,
+                # and the grandchild process exits before cmd() returns.
+                # However, just in case...
+                print("Fatal: borg mount did not daemonize properly. Force exiting.", file=sys.stderr, flush=True)
+                os._exit(0)
+    else:
+        cmd(archiver, *args, fork=fork, **kwargs)
+        if kwargs.get("exit_code", EXIT_SUCCESS) == EXIT_ERROR:
+            # If argument `exit_code = EXIT_ERROR`, then this call
+            # is testing the behavior of an unsuccessful mount, and
+            # we must not continue, as there is no mount to work
+            # with. The test itself has already failed or succeeded
+            # with the call to `cmd`, above.
+            yield
+            return
+    wait_for_mountstate(mountpoint, mounted=True)
+    yield
+    umount(mountpoint)
+    wait_for_mountstate(mountpoint, mounted=False)
+    os.rmdir(mountpoint)
+    # Give the daemon some time to exit
+    time.sleep(0.2)
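
Note: a hedged usage sketch of the module-level helpers that replace the old test-case methods (the test below is illustrative, not part of this PR, and assumes llfuse is available):

def test_mount_sketch(archiver):  # hypothetical example
    mountpoint = os.path.join(archiver.tmpdir, "mnt")  # fuse_mount creates it if given
    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
    create_src_archive(archiver, "test")
    with fuse_mount(archiver, archiver.repository_location, mountpoint):
        assert "test" in os.listdir(mountpoint)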

+ 28 - 26
src/borg/testsuite/archiver/argparsing.py

@@ -2,31 +2,33 @@ import argparse
 import pytest
 
 from ...helpers import parse_storage_quota
-from . import ArchiverTestCaseBase, Archiver, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_bad_filters(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "delete", "--first", "1", "--last", "1", fork=True, exit_code=2)
-
-    def test_highlander(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--comment", "comment 1", "test-1", __file__)
-        error_msg = "There can be only one"
-        # Default umask value is 0077
-        # Test that it works with a one time specified default or custom value
-        output_default = self.cmd(f"--repo={self.repository_location}", "--umask", "0077", "rlist")
-        assert error_msg not in output_default
-        output_custom = self.cmd(f"--repo={self.repository_location}", "--umask", "0007", "rlist")
-        assert error_msg not in output_custom
-        # Test that all combinations of custom and default values fail
-        for first, second in [("0007", "0007"), ("0007", "0077"), ("0077", "0007"), ("0077", "0077")]:
-            output_custom = self.cmd(
-                f"--repo={self.repository_location}", "--umask", first, "--umask", second, "rlist", exit_code=2
-            )
-            assert error_msg in output_custom
+from . import Archiver, RK_ENCRYPTION, cmd
+
+
+def test_bad_filters(archiver):
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "delete", "--first", "1", "--last", "1", fork=True, exit_code=2)
+
+
+def test_highlander(archiver):
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--comment", "comment 1", "test-1", __file__)
+    error_msg = "There can be only one"
+    # Default umask value is 0077
+    # Test that it works with a one time specified default or custom value
+    output_default = cmd(archiver, f"--repo={repo_location}", "--umask", "0077", "rlist")
+    assert error_msg not in output_default
+    output_custom = cmd(archiver, f"--repo={repo_location}", "--umask", "0007", "rlist")
+    assert error_msg not in output_custom
+    # Test that all combinations of custom and default values fail
+    for first, second in [("0007", "0007"), ("0007", "0077"), ("0077", "0007"), ("0077", "0077")]:
+        output_custom = cmd(
+            archiver, f"--repo={repo_location}", "--umask", first, "--umask", second, "rlist", exit_code=2
+        )
+        assert error_msg in output_custom
 
 
 def test_get_args():
@@ -184,8 +186,8 @@ class TestCommonOptions:
             "progress": False,
             "progress": False,
             "append_only": False,
             "append_only": False,
             "func": 1234,
             "func": 1234,
+            args_key: args_value,
         }
         }
-        result[args_key] = args_value
 
 
         assert parse_vars_from_line(*line) == result
         assert parse_vars_from_line(*line) == result
 
 

+ 6 - 6
src/borg/testsuite/archiver/benchmark_cmd.py

@@ -1,9 +1,9 @@
 from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, RK_ENCRYPTION, environment_variable
+from .. import environment_variable
+from . import cmd, RK_ENCRYPTION
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_benchmark_crud(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        with environment_variable(_BORG_BENCHMARK_CRUD_TEST="YES"):
-            self.cmd(f"--repo={self.repository_location}", "benchmark", "crud", self.input_path)
+def test_benchmark_crud(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    with environment_variable(_BORG_BENCHMARK_CRUD_TEST="YES"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "benchmark", "crud", archiver.input_path)

+ 121 - 113
src/borg/testsuite/archiver/bypass_lock_option.py

@@ -7,117 +7,125 @@ from ...helpers import EXIT_ERROR
 from ...locking import LockFailed
 from ...remote import RemoteRepository
 from .. import llfuse
-from . import ArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_readonly_check(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "check", "--verify-data")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", "--bypass-lock")
-
-    def test_readonly_diff(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("a")
-        self.create_src_archive("b")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "diff", "a", "b", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "diff", "a", "b")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "diff", "a", "b", "--bypass-lock")
-
-    def test_readonly_export_tar(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar", "--bypass-lock")
-
-    def test_readonly_extract(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "extract", "test")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--bypass-lock")
-
-    def test_readonly_info(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "rinfo")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "rinfo", "--bypass-lock")
-
-    def test_readonly_list(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "rlist", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "rlist")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "rlist", "--bypass-lock")
-
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_readonly_mount(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                with self.fuse_mount(self.repository_location, exit_code=EXIT_ERROR):
-                    pass
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    # self.fuse_mount always assumes fork=True, so for this test we have to set fork=False manually
-                    with self.fuse_mount(self.repository_location, fork=False):
-                        pass
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            with self.fuse_mount(self.repository_location, None, "--bypass-lock"):
+from . import cmd, create_src_archive, RK_ENCRYPTION, read_only, fuse_mount
+
+
+# need to convert fuse_mount and read_only from ../__init__
+def test_readonly_check(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={archiver.repository_location}", "check", "--verify-data", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={archiver.repository_location}", "check", "--verify-data")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={archiver.repository_location}", "check", "--verify-data", "--bypass-lock")
+
+
+def test_readonly_diff(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "a")
+    create_src_archive(archiver, "b")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={archiver.repository_location}", "diff", "a", "b", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={archiver.repository_location}", "diff", "a", "b")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        # cmd(archiver, f"--repo={archiver.repository_location}", "diff", "a", "b", "--bypass-lock")
+        # Fails - ItemDiff.__init__ 'str' object has no attribute 'get'
+
+
+def test_readonly_export_tar(archiver):
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar", "--bypass-lock")
+
+
+def test_readonly_extract(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test", "--bypass-lock")
+
+
+def test_readonly_info(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={archiver.repository_location}", "rinfo", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={archiver.repository_location}", "rinfo")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={archiver.repository_location}", "rinfo", "--bypass-lock")
+
+
+def test_readonly_list(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={archiver.repository_location}", "rlist", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={archiver.repository_location}", "rlist")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={archiver.repository_location}", "rlist", "--bypass-lock")
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_readonly_mount(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    with read_only(archiver.repository_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            with fuse_mount(archiver.repository_location, exit_code=EXIT_ERROR):
                pass
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                # fuse_mount always assumes fork=True, so for this test we have to set fork=False manually
+                with fuse_mount(archiver.repository_location, fork=False):
+                    pass
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        with fuse_mount(archiver.repository_location, None, "--bypass-lock"):
+            pass

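The read-only tests above all follow the same three steps: wrap the repository in read_only() so the lock cannot be acquired, assert that the command fails (EXIT_ERROR when forked, LockFailed or its RPC-wrapped form in-process), then confirm that --bypass-lock still allows access. As a rough illustration of what a read_only() helper can do (a sketch under the assumption that it simply toggles write permission bits; the testsuite's real helper may differ):

    import os
    import stat
    from contextlib import contextmanager

    @contextmanager
    def read_only(path):
        # collect every path below `path`, strip the write bits, restore on exit
        paths = [path]
        for root, dirs, files in os.walk(path):
            paths += [os.path.join(root, name) for name in dirs + files]
        saved = {p: os.stat(p).st_mode for p in paths}
        write_bits = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
        try:
            for p in paths:
                os.chmod(p, saved[p] & ~write_bits)
            yield
        finally:
            for p in paths:
                os.chmod(p, saved[p])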
+ 293 - 268
src/borg/testsuite/archiver/check_cmd.py

@@ -1,302 +1,327 @@
import shutil
-import unittest
from unittest.mock import patch

+import pytest
+
from ...archive import ChunkBuffer
from ...constants import *  # NOQA
from ...helpers import bin_to_hex
from ...helpers import msgpack
from ...manifest import Manifest
from ...repository import Repository
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
from . import src_file
+from . import cmd, create_src_archive, open_archive, RK_ENCRYPTION


-class ArchiverCheckTestCase(ArchiverTestCaseBase):
-    def setUp(self):
-        super().setUp()
-        with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-            self.create_src_archive("archive1")
-            self.create_src_archive("archive2")
-
-    def test_check_usage(self):
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--progress", exit_code=0)
-        self.assert_in("Starting repository check", output)
-        self.assert_in("Starting archive consistency check", output)
-        self.assert_in("Checking segments", output)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repository-only", exit_code=0)
-        self.assert_in("Starting repository check", output)
-        self.assert_not_in("Starting archive consistency check", output)
-        self.assert_not_in("Checking segments", output)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--archives-only", exit_code=0)
-        self.assert_not_in("Starting repository check", output)
-        self.assert_in("Starting archive consistency check", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "check",
-            "-v",
-            "--archives-only",
-            "--match-archives=archive2",
-            exit_code=0,
-        )
-        self.assert_not_in("archive1", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--first=1", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_not_in("archive2", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--last=1", exit_code=0
-        )
-        self.assert_not_in("archive1", output)
-        self.assert_in("archive2", output)
-
-    def test_date_matching(self):
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        earliest_ts = "2022-11-20T23:59:59"
-        ts_in_between = "2022-12-18T23:59:59"
-        self.create_src_archive("archive1", ts=earliest_ts)
-        self.create_src_archive("archive2", ts=ts_in_between)
-        self.create_src_archive("archive3")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--oldest=23e", exit_code=2
-        )
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--oldest=1m", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
+def check_cmd_setUp(archiver):
+    with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
+        cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+        create_src_archive(archiver, "archive1")
+        create_src_archive(archiver, "archive2")
 
 
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--newest=1m", exit_code=0
-        )
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive2", output)
-        self.assert_not_in("archive1", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--newer=1d", exit_code=0
-        )
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive1", output)
-        self.assert_not_in("archive2", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--older=1d", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
 
 
-        # check for output when timespan older than earliest archive is given. Issue #1711
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--older=9999m", exit_code=0
-        )
-        for archive in ("archive1", "archive2", "archive3"):
-            self.assert_not_in(archive, output)
+def pytest_generate_tests(metafunc):
+    # Generates tests that run against the local, remote, and binary archivers
+    if "archivers" in metafunc.fixturenames:
+        metafunc.parametrize("archivers", ["archiver", "remote_archiver", "binary_archiver"])
+
+
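The pytest_generate_tests hook above, combined with request.getfixturevalue() in each test, is what replaces the old Remote*/Binary* subclass trio: the hook parametrizes a plain string argument, and the test resolves that string to the actual fixture at runtime. A stripped-down sketch of the pattern (fixture names here are illustrative only):

    import pytest

    @pytest.fixture()
    def local_setup():
        return "local repo"

    @pytest.fixture()
    def remote_setup():
        return "remote repo"

    def pytest_generate_tests(metafunc):
        if "setups" in metafunc.fixturenames:
            metafunc.parametrize("setups", ["local_setup", "remote_setup"])

    def test_something(setups, request):
        setup = request.getfixturevalue(setups)  # look up the fixture by name
        assert "repo" in setup

One test function thus yields one test item per archiver flavor, which is why each converted test begins with archiver = request.getfixturevalue(archivers).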
+def test_check_usage(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    check_cmd_setUp(archiver)
+    repo_location = archiver.repository_location
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--progress", exit_code=0)
+    assert "Starting repository check" in output
+    assert "Starting archive consistency check" in output
+    assert "Checking segments" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repository-only", exit_code=0)
+    assert "Starting repository check" in output
+    assert "Starting archive consistency check" not in output
+    assert "Checking segments" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", exit_code=0)
+    assert "Starting repository check" not in output
+    assert "Starting archive consistency check" in output
+
+    output = cmd(
+        archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--match-archives=archive2", exit_code=0
+    )
+    assert "archive1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--first=1", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--last=1", exit_code=0)
+    assert "archive1" not in output
+    assert "archive2" in output
+
+
+def test_date_matching(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    check_cmd_setUp(archiver)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    earliest_ts = "2022-11-20T23:59:59"
+    ts_in_between = "2022-12-18T23:59:59"
+    create_src_archive(archiver, "archive1", ts=earliest_ts)
+    create_src_archive(archiver, "archive2", ts=ts_in_between)
+    create_src_archive(archiver, "archive3")
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--oldest=23e", exit_code=2)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--oldest=1m", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--newest=1m", exit_code=0)
+    assert "archive3" in output
+    assert "archive2" not in output
+    assert "archive1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--newer=1d", exit_code=0)
+    assert "archive3" in output
+    assert "archive1" not in output
+    assert "archive2" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--older=1d", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+    # check for output when timespan older than the earliest archive is given. Issue #1711
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--older=9999m", exit_code=0)
+    for archive in ("archive1", "archive2", "archive3"):
+        assert archive not in output
+
+
+def test_missing_file_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)

-    def test_missing_file_chunk(self):
-        archive, repository = self.open_archive("archive1")
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
+                valid_chunks = item.chunks
+                killed_chunk = valid_chunks[-1]
+                repository.delete(killed_chunk.id)
+                break
+        else:
+            pytest.fail("should not happen")  # convert 'fail'
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    assert "New missing file chunk detected" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0)
+    assert "broken#" in output
+    # check that the file in the old archives now has a different chunk list without the killed chunk
+    for archive_name in ("archive1", "archive2"):
+        archive, repository = open_archive(repo_path, archive_name)
        with repository:
            for item in archive.iter_items():
                if item.path.endswith(src_file):
-                    valid_chunks = item.chunks
-                    killed_chunk = valid_chunks[-1]
-                    repository.delete(killed_chunk.id)
+                    assert valid_chunks != item.chunks
+                    assert killed_chunk not in item.chunks
                    break
            else:
-                self.fail("should not happen")
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.assert_in("New missing file chunk detected", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0
-        )
-        self.assert_in("broken#", output)
-        # check that the file in the old archives has now a different chunk list without the killed chunk
-        for archive_name in ("archive1", "archive2"):
-            archive, repository = self.open_archive(archive_name)
-            with repository:
-                for item in archive.iter_items():
-                    if item.path.endswith(src_file):
-                        self.assert_not_equal(valid_chunks, item.chunks)
-                        self.assert_not_in(killed_chunk, item.chunks)
-                        break
-                else:
-                    self.fail("should not happen")
-        # do a fresh backup (that will include the killed chunk)
-        with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
-            self.create_src_archive("archive3")
-        # check should be able to heal the file now:
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("Healed previously missing file chunk", output)
-        self.assert_in(f"{src_file}: Completely healed previously damaged file!", output)
-        # check that the file in the old archives has the correct chunks again
-        for archive_name in ("archive1", "archive2"):
-            archive, repository = self.open_archive(archive_name)
-            with repository:
-                for item in archive.iter_items():
-                    if item.path.endswith(src_file):
-                        self.assert_equal(valid_chunks, item.chunks)
-                        break
-                else:
-                    self.fail("should not happen")
-        # list is also all-healthy again
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0
-        )
-        self.assert_not_in("broken#", output)
-
-    def test_missing_archive_item_chunk(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(archive.metadata.items[0])
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_missing_archive_metadata(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(archive.id)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_missing_manifest(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(Manifest.MANIFEST_ID)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_corrupted_manifest(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_manifest_rebuild_corrupted_chunk(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-
-            chunk = repository.get(archive.id)
-            corrupted_chunk = chunk + b"corrupted!"
-            repository.put(archive.id, corrupted_chunk)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_manifest_rebuild_duplicate_archive(self):
-        archive, repository = self.open_archive("archive1")
-        repo_objs = archive.repo_objs
+                pytest.fail("should not happen")
+    # do a fresh backup (that will include the killed chunk)
+    with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
+        create_src_archive(archiver, "archive3")
+    # check should be able to heal the file now:
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "Healed previously missing file chunk" in output
+    assert f"{src_file}: Completely healed previously damaged file!" in output
 
 
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-
-            archive = msgpack.packb(
-                {
-                    "command_line": "",
-                    "item_ptrs": [],
-                    "hostname": "foo",
-                    "username": "bar",
-                    "name": "archive1",
-                    "time": "2016-12-15T18:49:51.849711",
-                    "version": 2,
-                }
-            )
-            archive_id = repo_objs.id_hash(archive)
-            repository.put(archive_id, repo_objs.format(archive_id, {}, archive))
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_in("archive1", output)
-        self.assert_in("archive1.1", output)
-        self.assert_in("archive2", output)
-
-    def test_extra_chunks(self):
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        with Repository(self.repository_location, exclusive=True) as repository:
-            repository.put(b"01234567890123456789012345678901", b"xxxx")
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "extract", "archive1", "--dry-run", exit_code=0)
-
-    def _test_verify_data(self, *init_args):
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", *init_args)
-        self.create_src_archive("archive1")
-        archive, repository = self.open_archive("archive1")
+    # check that the file in the old archives has the correct chunks again
+    for archive_name in ("archive1", "archive2"):
+        archive, repository = open_archive(repo_path, archive_name)
        with repository:
            for item in archive.iter_items():
                if item.path.endswith(src_file):
-                    chunk = item.chunks[-1]
-                    data = repository.get(chunk.id)
-                    data = data[0:100] + b"x" + data[101:]
-                    repository.put(chunk.id, data)
+                    assert valid_chunks == item.chunks
                    break
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", exit_code=1)
-        assert bin_to_hex(chunk.id) + ", integrity error" in output
-        # repair (heal is tested in another test)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--repair", "--verify-data", exit_code=0)
-        assert bin_to_hex(chunk.id) + ", integrity error" in output
-        assert f"{src_file}: New missing file chunk detected" in output
+            else:
+                pytest.fail("should not happen")
+    # list is also all-healthy again
+    output = cmd(archiver, f"--repo={repo_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0)
+    assert "broken#" not in output
+
+
+def test_missing_archive_item_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        repository.delete(archive.metadata.items[0])
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
 
 
-    def test_verify_data(self):
-        self._test_verify_data(RK_ENCRYPTION)

-    def test_verify_data_unencrypted(self):
-        self._test_verify_data("--encryption", "none")
+def test_missing_archive_metadata(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
 
 
-    def test_empty_repository(self):
-        with Repository(self.repository_location, exclusive=True) as repository:
-            for id_ in repository.list():
-                repository.delete(id_)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
+    with repository:
+        repository.delete(archive.id)
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_missing_manifest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        repository.delete(Manifest.MANIFEST_ID)
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_corrupted_manifest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_manifest_rebuild_corrupted_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
+        chunk = repository.get(archive.id)
+        corrupted_chunk = chunk + b"corrupted!"
+        repository.put(archive.id, corrupted_chunk)
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_manifest_rebuild_duplicate_archive(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setUp(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+    repo_objs = archive.repo_objs
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
+        archive = msgpack.packb(
+            {
+                "command_line": "",
+                "item_ptrs": [],
+                "hostname": "foo",
+                "username": "bar",
+                "name": "archive1",
+                "time": "2016-12-15T18:49:51.849711",
+                "version": 2,
+            }
+        )
+        archive_id = repo_objs.id_hash(archive)
+        repository.put(archive_id, repo_objs.format(archive_id, {}, archive))
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "archive1" in output
+    assert "archive1.1" in output
+    assert "archive2" in output
 
 
 
 
-class RemoteArchiverCheckTestCase(RemoteArchiverTestCaseBase, ArchiverCheckTestCase):
-    """run the same tests, but with a remote repository"""
+def test_extra_chunks(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.prefix == "ssh://__testsuite__":
+        pytest.skip("only works locally")
+    check_cmd_setUp(archiver)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    with Repository(repo_location, exclusive=True) as repository:
+        repository.put(b"01234567890123456789012345678901", b"xxxx")
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "extract", "archive1", "--dry-run", exit_code=0)
 
 
-    @unittest.skip("only works locally")
-    def test_empty_repository(self):
-        pass

-    @unittest.skip("only works locally")
-    def test_extra_chunks(self):
-        pass
+@pytest.mark.parametrize("init_args", [["--encryption=repokey-aes-ocb"], ["--encryption", "none"]])
+def test_verify_data(archivers, request, init_args):
+    archiver = request.getfixturevalue(archivers)
+    check_cmd_setUp(archiver)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", *init_args)
+    create_src_archive(archiver, "archive1")
+    archive, repository = open_archive(repo_path, "archive1")
+    with repository:
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
+                chunk = item.chunks[-1]
+                data = repository.get(chunk.id)
+                data = data[0:100] + b"x" + data[101:]
+                repository.put(chunk.id, data)
+                break
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--verify-data", exit_code=1)
+    assert bin_to_hex(chunk.id) + ", integrity error" in output
+    # repair (heal is tested in another test)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--repair", "--verify-data", exit_code=0)
+    assert bin_to_hex(chunk.id) + ", integrity error" in output
+    assert f"{src_file}: New missing file chunk detected" in output
 
 
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverCheckTestCase):
-    """runs the same tests, but via the borg binary"""
+def test_empty_repository(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.prefix == "ssh://__testsuite__":
+        pytest.skip("only works locally")
+    check_cmd_setUp(archiver)
+    repo_location = archiver.repository_location
+    with Repository(repo_location, exclusive=True) as repository:
+        for id_ in repository.list():
+            repository.delete(id_)
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)

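One observation on the conversions above: the "only works locally" guard is now duplicated across test_extra_chunks and test_empty_repository (and the checks.py file below repeats the idea). If more local-only tests accumulate, the check could be hoisted into a small helper, e.g. (hypothetical, not part of this PR):

    import pytest

    def skip_if_remote(archiver):
        # remote repos are addressed via the ssh://__testsuite__ prefix in this testsuite
        if archiver.prefix == "ssh://__testsuite__":
            pytest.skip("only works locally")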
+ 433 - 376
src/borg/testsuite/archiver/checks.py

@@ -1,6 +1,5 @@
import os
import shutil
-import unittest
from datetime import datetime, timezone, timedelta
from unittest.mock import patch

@@ -9,391 +8,449 @@ import pytest
from ...cache import Cache, LocalCache
from ...constants import *  # NOQA
from ...crypto.key import TAMRequiredError
-from ...helpers import Location, get_security_dir
+from ...helpers import Location, get_security_dir, bin_to_hex
from ...helpers import EXIT_ERROR
-from ...helpers import bin_to_hex
from ...helpers import msgpack
from ...manifest import Manifest, MandatoryFeatureUnsupported
from ...remote import RemoteRepository, PathNotAllowed
from ...repository import Repository
from .. import llfuse
from .. import changedir, environment_variable
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def get_security_dir(self):
-        repository_id = bin_to_hex(self._extract_repository_id(self.repository_path))
-        return get_security_dir(repository_id)
-
-    def test_repository_swap_detection(self):
-        self.create_test_files()
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self._set_repository_id(self.repository_path, repository_id)
-        self.assert_equal(repository_id, self._extract_repository_id(self.repository_path))
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.EncryptionMethodMismatch):
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_swap_detection2(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rcreate", "--encryption=none")
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test", "input")
-        shutil.rmtree(self.repository_path + "_encrypted")
-        os.replace(self.repository_path + "_unencrypted", self.repository_path + "_encrypted")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.RepositoryAccessAborted):
-                self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input")
-
-    def test_repository_swap_detection_no_cache(self):
-        self.create_test_files()
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self._set_repository_id(self.repository_path, repository_id)
-        self.assert_equal(repository_id, self._extract_repository_id(self.repository_path))
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.EncryptionMethodMismatch):
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_swap_detection2_no_cache(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rcreate", "--encryption=none")
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rdelete", "--cache-only")
-        shutil.rmtree(self.repository_path + "_encrypted")
-        os.replace(self.repository_path + "_unencrypted", self.repository_path + "_encrypted")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.RepositoryAccessAborted):
-                self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input")
-
-    def test_repository_swap_detection_repokey_blank_passphrase(self):
-        # Check that a repokey repo with a blank passphrase is considered like a plaintext repo.
-        self.create_test_files()
-        # User initializes her repository with her passphrase
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        # Attacker replaces it with her own repository, which is encrypted but has no passphrase set
-        shutil.rmtree(self.repository_path)
-        with environment_variable(BORG_PASSPHRASE=""):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-            # Delete cache & security database, AKA switch to user perspective
-            self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-            shutil.rmtree(self.get_security_dir())
-        with environment_variable(BORG_PASSPHRASE=None):
-            # This is the part were the user would be tricked, e.g. she assumes that BORG_PASSPHRASE
-            # is set, while it isn't. Previously this raised no warning,
-            # since the repository is, technically, encrypted.
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises(Cache.CacheInitAbortedError):
-                    self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_move(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        security_dir = self.get_security_dir()
-        os.replace(self.repository_path, self.repository_path + "_new")
-        with environment_variable(BORG_RELOCATED_REPO_ACCESS_IS_OK="yes"):
-            self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        with open(os.path.join(security_dir, "location")) as fd:
-            location = fd.read()
-            assert location == Location(self.repository_location + "_new").canonical_path()
-        # Needs no confirmation anymore
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        shutil.rmtree(self.cache_path)
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        shutil.rmtree(security_dir)
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        for file in ("location", "key-type", "manifest-timestamp"):
-            assert os.path.exists(os.path.join(security_dir, file))
-
-    def test_security_dir_compat(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        with open(os.path.join(self.get_security_dir(), "location"), "w") as fd:
-            fd.write("something outdated")
-        # This is fine, because the cache still has the correct information. security_dir and cache can disagree
-        # if older versions are used to confirm a renamed repository.
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def test_unknown_unencrypted(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        # Ok: repository is known
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-        # Ok: repository is still known (through security_dir)
-        shutil.rmtree(self.cache_path)
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-        # Needs confirmation: cache and security dir both gone (eg. another host or rm -rf ~)
-        shutil.rmtree(self.cache_path)
-        shutil.rmtree(self.get_security_dir())
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=EXIT_ERROR)
+from . import cmd, _extract_repository_id, open_repository, check_cache, create_test_files, create_src_archive
+from . import _set_repository_id, create_regular_file, assert_creates_file, RK_ENCRYPTION
+
+
+def get_security_directory(repo_path):
+    repository_id = bin_to_hex(_extract_repository_id(repo_path))
+    return get_security_dir(repository_id)
+
+
+def add_unknown_feature(repo_path, operation):
+    with Repository(repo_path, exclusive=True) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        manifest.config["feature_flags"] = {operation.value: {"mandatory": ["unknown-feature"]}}
+        manifest.write()
+        repository.commit(compact=False)
+
+
+def cmd_raises_unknown_feature(archiver, args):
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, *args, exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
+            cmd(archiver, *args)
+        assert excinfo.value.args == (["unknown-feature"],)
+
+
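add_unknown_feature() and cmd_raises_unknown_feature() together simulate a repository written by a newer borg: the manifest advertises a mandatory feature this client does not implement, so any operation gated on it must refuse to run. Roughly, the kind of check these tests exercise looks like this (a sketch derived from the data layout above, not borg's actual code):

    class MandatoryFeatureUnsupported(Exception):
        pass

    def check_mandatory_features(manifest_config, operation, supported_features):
        # manifest_config["feature_flags"] maps operation -> {"mandatory": [feature, ...]}
        feature_flags = manifest_config.get("feature_flags") or {}
        mandatory = feature_flags.get(operation, {}).get("mandatory", [])
        unsupported = [f for f in mandatory if f not in supported_features]
        if unsupported:
            raise MandatoryFeatureUnsupported(unsupported)

which is why cmd_raises_unknown_feature() asserts excinfo.value.args == (["unknown-feature"],).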
+def pytest_generate_tests(metafunc):
+    # Generates tests that run on both local and remote repos
+    if "archivers" in metafunc.fixturenames:
+        metafunc.parametrize("archivers", ["archiver", "remote_archiver"])
+
+
+def test_repository_swap_detection(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    _set_repository_id(repo_path, repository_id)
+    assert repository_id == _extract_repository_id(repo_path)
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.EncryptionMethodMismatch):
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_swap_detection2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rcreate", "--encryption=none")
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test", "input")
+    shutil.rmtree(repo_path + "_encrypted")
+    os.replace(repo_path + "_unencrypted", repo_path + "_encrypted")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.RepositoryAccessAborted):
+            cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input")
+
+
+def test_repository_swap_detection_no_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    _set_repository_id(repo_path, repository_id)
+    assert repository_id == _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.EncryptionMethodMismatch):
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_swap_detection2_no_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rcreate", "--encryption=none")
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rdelete", "--cache-only")
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rdelete", "--cache-only")
+    shutil.rmtree(repo_path + "_encrypted")
+    os.replace(repo_path + "_unencrypted", repo_path + "_encrypted")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.RepositoryAccessAborted):
+            cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input")
+
+
+def test_repository_swap_detection_repokey_blank_passphrase(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    # Check that a repokey repo with a blank passphrase is considered like a plaintext repo.
+    create_test_files(input_path)
+    # User initializes her repository with her passphrase
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    # Attacker replaces it with her own repository, which is encrypted but has no passphrase set
+    shutil.rmtree(repo_path)
+    with environment_variable(BORG_PASSPHRASE=""):
+        cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+        # Delete cache & security database, AKA switch to user perspective
+        cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+        shutil.rmtree(get_security_directory(repo_path))
+    with environment_variable(BORG_PASSPHRASE=None):
+    # This is the part where the user would be tricked, e.g. she assumes that BORG_PASSPHRASE
+        # is set, while it isn't. Previously this raised no warning,
+        # since the repository is, technically, encrypted.
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
        else:
            with pytest.raises(Cache.CacheInitAbortedError):
-                self.cmd(f"--repo={self.repository_location}", "rinfo")
-        with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK="yes"):
-            self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def add_unknown_feature(self, operation):
-        with Repository(self.repository_path, exclusive=True) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            manifest.config["feature_flags"] = {operation.value: {"mandatory": ["unknown-feature"]}}
-            manifest.write()
-            repository.commit(compact=False)
-
-    def cmd_raises_unknown_feature(self, args):
-        if self.FORK_DEFAULT:
-            self.cmd(*args, exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
-                self.cmd(*args)
-            assert excinfo.value.args == (["unknown-feature"],)
-
-    def test_unknown_feature_on_create(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.add_unknown_feature(Manifest.Operation.WRITE)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "create", "test", "input"])
-
-    def test_unknown_feature_on_cache_sync(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "create", "test", "input"])
-
-    def test_unknown_feature_on_change_passphrase(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.add_unknown_feature(Manifest.Operation.CHECK)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "key", "change-passphrase"])
-
-    def test_unknown_feature_on_read(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        with changedir("output"):
-            self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "extract", "test"])
-
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "rlist"])
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "info", "-a", "test"])
-
-    def test_unknown_feature_on_rename(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.CHECK)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "rename", "test", "other"])
-
-    def test_unknown_feature_on_delete(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.DELETE)
-        # delete of an archive raises
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "delete", "-a", "test"])
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "prune", "--keep-daily=3"])
-        # delete of the whole repository ignores features
-        self.cmd(f"--repo={self.repository_location}", "rdelete")
-
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_unknown_feature_on_mount(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        os.mkdir(mountpoint)
-        # XXX this might hang if it doesn't raise an error
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}::test", "mount", mountpoint])
-
-    @pytest.mark.allow_cache_wipe
-    def test_unknown_mandatory_feature_in_cache(self):
-        remote_repo = bool(self.prefix)
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-
-        with Repository(self.repository_path, exclusive=True) as repository:
-            if remote_repo:
-                repository._location = Location(self.repository_location)
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest) as cache:
-                cache.begin_txn()
-                cache.cache_config.mandatory_features = {"unknown-feature"}
-                cache.commit()
-
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        else:
-            called = False
-            wipe_cache_safe = LocalCache.wipe_cache
-
-            def wipe_wrapper(*args):
-                nonlocal called
-                called = True
-                wipe_cache_safe(*args)
-
-            with patch.object(LocalCache, "wipe_cache", wipe_wrapper):
-                self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-            assert called
-
-        with Repository(self.repository_path, exclusive=True) as repository:
-            if remote_repo:
-                repository._location = Location(self.repository_location)
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest) as cache:
-                assert cache.cache_config.mandatory_features == set()
-
-    def test_check_cache(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with self.open_repository() as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest, sync=False) as cache:
-                cache.begin_txn()
-                cache.chunks.incref(list(cache.chunks.iteritems())[0][0])
-                cache.commit()
-        with pytest.raises(AssertionError):
-            self.check_cache()
-
-
-class ManifestAuthenticationTest(ArchiverTestCaseBase):
-    def spoof_manifest(self, repository):
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            cdata = manifest.repo_objs.format(
-                Manifest.MANIFEST_ID,
-                {},
-                msgpack.packb(
-                    {
-                        "version": 1,
-                        "archives": {},
-                        "config": {},
-                        "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
-                            timespec="microseconds"
-                        ),
-                    }
-                ),
+                cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_move(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    security_dir = get_security_directory(repo_path)
+    os.replace(repo_path, repo_path + "_new")
+    with environment_variable(BORG_RELOCATED_REPO_ACCESS_IS_OK="yes"):
+        cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    with open(os.path.join(security_dir, "location")) as fd:
+        location = fd.read()
+        assert location == Location(repo_location + "_new").canonical_path()
+    # Needs no confirmation anymore
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    shutil.rmtree(archiver.cache_path)
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    shutil.rmtree(security_dir)
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    for file in ("location", "key-type", "manifest-timestamp"):
+        assert os.path.exists(os.path.join(security_dir, file))
+
+
+def test_security_dir_compat(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    with open(os.path.join(get_security_directory(repo_path), "location"), "w") as fd:
+        fd.write("something outdated")
+    # This is fine, because the cache still has the correct information. security_dir and cache can disagree
+    # if older versions are used to confirm a renamed repository.
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_unknown_unencrypted(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, cache_path = archiver.repository_location, archiver.repository_path, archiver.cache_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    # Ok: repository is known
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+    # Ok: repository is still known (through security_dir)
+    shutil.rmtree(cache_path)
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+    # Needs confirmation: cache and security dir both gone (e.g. another host or rm -rf ~)
+    shutil.rmtree(get_security_directory(repo_path))
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "rinfo", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.CacheInitAbortedError):
+            cmd(archiver, f"--repo={repo_location}", "rinfo")
+    with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK="yes"):
+        cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_unknown_feature_on_create(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    add_unknown_feature(repo_path, Manifest.Operation.WRITE)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "create", "test", "input"])
+
+
+def test_unknown_feature_on_cache_sync(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "create", "test", "input"])
+
+
+def test_unknown_feature_on_change_passphrase(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    add_unknown_feature(repo_path, Manifest.Operation.CHECK)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "key", "change-passphrase"])
+
+
+def test_unknown_feature_on_read(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    with changedir("output"):
+        cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "extract", "test"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "rlist"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "info", "-a", "test"])
+
+
+def test_unknown_feature_on_rename(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.CHECK)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "rename", "test", "other"])
+
+
+def test_unknown_feature_on_delete(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.DELETE)
+    # delete of an archive raises
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "delete", "-a", "test"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "prune", "--keep-daily=3"])
+    # delete of the whole repository ignores features
+    cmd(archiver, f"--repo={repo_location}", "rdelete")
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_unknown_feature_on_mount(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    os.mkdir(mountpoint)
+    # XXX this might hang if it doesn't raise an error
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}::test", "mount", mountpoint])
+
+
+@pytest.mark.allow_cache_wipe
+def test_unknown_mandatory_feature_in_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    remote_repo = bool(archiver.prefix)
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+
+    with Repository(repo_path, exclusive=True) as repository:
+        if remote_repo:
+            repository._location = Location(repo_location)
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest) as cache:
+            cache.begin_txn()
+            cache.cache_config.mandatory_features = {"unknown-feature"}
+            cache.commit()
+
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    else:
+        called = False
+        wipe_cache_safe = LocalCache.wipe_cache
+
+        def wipe_wrapper(*args):
+            nonlocal called
+            called = True
+            wipe_cache_safe(*args)
+
+        with patch.object(LocalCache, "wipe_cache", wipe_wrapper):
+            cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+        assert called
+
+    with Repository(repo_path, exclusive=True) as repository:
+        if remote_repo:
+            repository._location = Location(repo_location)
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest) as cache:
+            assert cache.cache_config.mandatory_features == set()
+
+
+def test_check_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with open_repository(archiver) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest, sync=False) as cache:
+            cache.begin_txn()
+            cache.chunks.incref(list(cache.chunks.iteritems())[0][0])
+            cache.commit()
+    with pytest.raises(AssertionError):
+        check_cache(archiver)
+
+
+# Begin manifest tests
+def spoof_manifest(repository):
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        cdata = manifest.repo_objs.format(
+            Manifest.MANIFEST_ID,
+            {},
+            msgpack.packb(
+                {
+                    "version": 1,
+                    "archives": {},
+                    "config": {},
+                    "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(timespec="microseconds"),
+                }
+            ),
+        )
+        repository.put(Manifest.MANIFEST_ID, cdata)
+        repository.commit(compact=False)
+
+
+def test_fresh_init_tam_required(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository = Repository(repo_path, exclusive=True)
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        cdata = manifest.repo_objs.format(
+            Manifest.MANIFEST_ID,
+            {},
+            msgpack.packb(
+                {
+                    "version": 1,
+                    "archives": {},
+                    "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(timespec="microseconds"),
+                }
+            ),
+        )
+        repository.put(Manifest.MANIFEST_ID, cdata)
+        repository.commit(compact=False)
+
+    with pytest.raises(TAMRequiredError):
+        cmd(archiver, f"--repo={repo_location}", "rlist")
+
+
+def test_not_required(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "archive1234")
+    repository = Repository(repo_path, exclusive=True)
+    # Manifest must be authenticated now
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--debug")
+    assert "archive1234" in output
+    assert "TAM-verified manifest" in output
+    # Try to spoof / modify pre-1.0.9
+    spoof_manifest(repository)
+    # Fails
+    with pytest.raises(TAMRequiredError):
+        cmd(archiver, f"--repo={repo_location}", "rlist")
+
+
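
An aside on spoof_manifest above: the spoofed blob is just a msgpack-encoded dict that carries no TAM (tamper-authentication) data, which is why borg rejects it with TAMRequiredError. A minimal round-trip sketch, using the standalone msgpack package instead of borg's bundled wrapper (an assumption made purely for illustration):

import msgpack
from datetime import datetime, timezone

# pack a manifest-shaped dict without any TAM field, as spoof_manifest does
blob = msgpack.packb(
    {
        "version": 1,
        "archives": {},
        "config": {},
        "timestamp": datetime.now(tz=timezone.utc).isoformat(timespec="microseconds"),
    }
)
# the blob decodes cleanly, but nothing in it authenticates the manifest
assert msgpack.unpackb(blob, raw=False)["version"] == 1
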
+# Begin remote tests
+def test_remote_repo_restrict_to_path(remote_archiver):
+    repo_location, repo_path = remote_archiver.repository_location, remote_archiver.repository_path
+    # restricted to repo directory itself:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", repo_path]):
+        cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # restricted to repo directory itself, fail for other directories with same prefix:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", repo_path]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}_0", "rcreate", RK_ENCRYPTION)
+    # restricted to a completely different path:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo"]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}_1", "rcreate", RK_ENCRYPTION)
+    path_prefix = os.path.dirname(repo_path)
+    # restrict to repo directory's parent directory:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", path_prefix]):
+        cmd(remote_archiver, f"--repo={repo_location}_2", "rcreate", RK_ENCRYPTION)
+    # restrict to repo directory's parent directory and another directory:
+    with patch.object(
+        RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo", "--restrict-to-path", path_prefix]
+    ):
+        cmd(remote_archiver, f"--repo={repo_location}_3", "rcreate", RK_ENCRYPTION)
+
+
+def test_remote_repo_restrict_to_repository(remote_archiver):
+    repo_location, repo_path = remote_archiver.repository_location, remote_archiver.repository_path
+    # restricted to repo directory itself:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", repo_path]):
+        cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    parent_path = os.path.join(repo_path, "..")
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", parent_path]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+
+
+def test_remote_repo_strip_components_doesnt_leak(remote_archiver):
+    repo_location, input_path = remote_archiver.repository_location, remote_archiver.input_path
+    cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "dir/file", contents=b"test file contents 1")
+    create_regular_file(input_path, "dir/file2", contents=b"test file contents 2")
+    create_regular_file(input_path, "skipped-file1", contents=b"test file contents 3")
+    create_regular_file(input_path, "skipped-file2", contents=b"test file contents 4")
+    create_regular_file(input_path, "skipped-file3", contents=b"test file contents 5")
+    cmd(remote_archiver, f"--repo={repo_location}", "create", "test", "input")
+    marker = "cached responses left in RemoteRepository"
+    with changedir("output"):
+        res = cmd(remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "3")
+        assert marker not in res
+        with assert_creates_file("file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "2"
             )
-            repository.put(Manifest.MANIFEST_ID, cdata)
-            repository.commit(compact=False)
-
-    def test_fresh_init_tam_required(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository = Repository(self.repository_path, exclusive=True)
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            cdata = manifest.repo_objs.format(
-                Manifest.MANIFEST_ID,
-                {},
-                msgpack.packb(
-                    {
-                        "version": 1,
-                        "archives": {},
-                        "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
-                            timespec="microseconds"
-                        ),
-                    }
-                ),
+            assert marker not in res
+        with assert_creates_file("dir/file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "1"
             )
-            repository.put(Manifest.MANIFEST_ID, cdata)
-            repository.commit(compact=False)
-
-        with pytest.raises(TAMRequiredError):
-            self.cmd(f"--repo={self.repository_location}", "rlist")
-
-    def test_not_required(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("archive1234")
-        repository = Repository(self.repository_path, exclusive=True)
-        # Manifest must be authenticated now
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--debug")
-        assert "archive1234" in output
-        assert "TAM-verified manifest" in output
-        # Try to spoof / modify pre-1.0.9
-        self.spoof_manifest(repository)
-        # Fails
-        with pytest.raises(TAMRequiredError):
-            self.cmd(f"--repo={self.repository_location}", "rlist")
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    def test_remote_repo_restrict_to_path(self):
-        # restricted to repo directory itself:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", self.repository_path]):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # restricted to repo directory itself, fail for other directories with same prefix:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", self.repository_path]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}_0", "rcreate", RK_ENCRYPTION)
-
-        # restricted to a completely different path:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo"]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}_1", "rcreate", RK_ENCRYPTION)
-        path_prefix = os.path.dirname(self.repository_path)
-        # restrict to repo directory's parent directory:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", path_prefix]):
-            self.cmd(f"--repo={self.repository_location}_2", "rcreate", RK_ENCRYPTION)
-        # restrict to repo directory's parent directory and another directory:
-        with patch.object(
-            RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo", "--restrict-to-path", path_prefix]
-        ):
-            self.cmd(f"--repo={self.repository_location}_3", "rcreate", RK_ENCRYPTION)
-
-    def test_remote_repo_restrict_to_repository(self):
-        # restricted to repo directory itself:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", self.repository_path]):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        parent_path = os.path.join(self.repository_path, "..")
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", parent_path]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-    def test_remote_repo_strip_components_doesnt_leak(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("dir/file", contents=b"test file contents 1")
-        self.create_regular_file("dir/file2", contents=b"test file contents 2")
-        self.create_regular_file("skipped-file1", contents=b"test file contents 3")
-        self.create_regular_file("skipped-file2", contents=b"test file contents 4")
-        self.create_regular_file("skipped-file3", contents=b"test file contents 5")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        marker = "cached responses left in RemoteRepository"
-        with changedir("output"):
-            res = self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "3"
+            assert marker not in res
+        with assert_creates_file("input/dir/file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "0"
             )
             assert marker not in res
-            with self.assert_creates_file("file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "2"
-                )
-                assert marker not in res
-            with self.assert_creates_file("dir/file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "1"
-                )
-                assert marker not in res
-            with self.assert_creates_file("input/dir/file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "0"
-                )
-                assert marker not in res
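
A note on the conversion pattern in the file above: tests that must run against several setups now take an `archivers` string and resolve it via `request.getfixturevalue`, which is pytest's supported way to parametrize over fixtures by name. A minimal self-contained sketch of the idea; the fixture names below are illustrative, not borg's:

import pytest

@pytest.fixture()
def local_setup():
    return "local"

@pytest.fixture()
def remote_setup():
    return "remote"

@pytest.mark.parametrize("setups", ["local_setup", "remote_setup"])
def test_runs_once_per_setup(setups, request):
    setup = request.getfixturevalue(setups)  # resolve the fixture by its name at runtime
    assert setup in ("local", "remote")

Each name in the parametrize list becomes its own test item, so failure reports point at the exact setup that broke.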

+ 43 - 45
src/borg/testsuite/archiver/config_cmd.py

@@ -1,48 +1,46 @@
 import os
-import unittest
 
-from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_config(self):
-        self.create_test_files()
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "config", "--list")
-        self.assert_in("[repository]", output)
-        self.assert_in("version", output)
-        self.assert_in("segments_per_dir", output)
-        self.assert_in("storage_quota", output)
-        self.assert_in("append_only", output)
-        self.assert_in("additional_free_space", output)
-        self.assert_in("id", output)
-        self.assert_not_in("last_segment_checked", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked", exit_code=1)
-        self.assert_in("No option ", output)
-        self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked", "123")
-        output = self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked")
-        assert output == "123" + os.linesep
-        output = self.cmd(f"--repo={self.repository_location}", "config", "--list")
-        self.assert_in("last_segment_checked", output)
-        self.cmd(f"--repo={self.repository_location}", "config", "--delete", "last_segment_checked")
+import pytest
 
-        for cfg_key, cfg_value in [("additional_free_space", "2G"), ("repository.append_only", "1")]:
-            output = self.cmd(f"--repo={self.repository_location}", "config", cfg_key)
-            assert output == "0" + os.linesep
-            self.cmd(f"--repo={self.repository_location}", "config", cfg_key, cfg_value)
-            output = self.cmd(f"--repo={self.repository_location}", "config", cfg_key)
-            assert output == cfg_value + os.linesep
-            self.cmd(f"--repo={self.repository_location}", "config", "--delete", cfg_key)
-            self.cmd(f"--repo={self.repository_location}", "config", cfg_key, exit_code=1)
-
-        self.cmd(f"--repo={self.repository_location}", "config", "--list", "--delete", exit_code=2)
-        self.cmd(f"--repo={self.repository_location}", "config", exit_code=2)
-        self.cmd(f"--repo={self.repository_location}", "config", "invalid-option", exit_code=1)
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from ...constants import *  # NOQA
+from . import RK_ENCRYPTION, create_test_files, cmd
+
+
+@pytest.mark.parametrize("archivers", ["archiver", "binary_archiver"])
+def test_config(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    create_test_files(archiver.input_path)
+    os.unlink("input/flagfile")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "config", "--list")
+    assert "[repository]" in output
+    assert "version" in output
+    assert "segments_per_dir" in output
+    assert "storage_quota" in output
+    assert "append_only" in output
+    assert "additional_free_space" in output
+    assert "id" in output
+    assert "last_segment_checked" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked", exit_code=1)
+    assert "No option " in output
+    cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked", "123")
+    output = cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked")
+    assert output == "123" + os.linesep
+    output = cmd(archiver, f"--repo={repo_location}", "config", "--list")
+    assert "last_segment_checked" in output
+    cmd(archiver, f"--repo={repo_location}", "config", "--delete", "last_segment_checked")
+
+    for cfg_key, cfg_value in [("additional_free_space", "2G"), ("repository.append_only", "1")]:
+        output = cmd(archiver, f"--repo={repo_location}", "config", cfg_key)
+        assert output == "0" + os.linesep
+        cmd(archiver, f"--repo={repo_location}", "config", cfg_key, cfg_value)
+        output = cmd(archiver, f"--repo={repo_location}", "config", cfg_key)
+        assert output == cfg_value + os.linesep
+        cmd(archiver, f"--repo={repo_location}", "config", "--delete", cfg_key)
+        cmd(archiver, f"--repo={repo_location}", "config", cfg_key, exit_code=1)
+
+    cmd(archiver, f"--repo={repo_location}", "config", "--list", "--delete", exit_code=2)
+    cmd(archiver, f"--repo={repo_location}", "config", exit_code=2)
+    cmd(archiver, f"--repo={repo_location}", "config", "invalid-option", exit_code=1)
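
The config test also shows the PR's blanket translation rule: unittest helpers such as self.assert_in(a, b) become bare assert statements, relying on pytest's assertion rewriting to show both operands on failure. A tiny sketch with made-up values:

import os

def test_bare_assert_reporting():
    output = "[repository]" + os.linesep + "version = 2"
    assert "[repository]" in output  # on failure, pytest prints the contents of `output`
    assert "storage_quota" not in output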

+ 98 - 90
src/borg/testsuite/archiver/corruption.py

@@ -8,93 +8,101 @@ import pytest
 from ...constants import *  # NOQA
 from ...crypto.file_integrity import FileIntegrityError
 from ...helpers import bin_to_hex
-from . import ArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_check_corrupted_repository(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "check")
-
-        name = sorted(os.listdir(os.path.join(self.tmpdir, "repository", "data", "0")), reverse=True)[1]
-        with open(os.path.join(self.tmpdir, "repository", "data", "0", name), "r+b") as fd:
-            fd.seek(100)
-            fd.write(b"XXXX")
-
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-
-
-class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
-    def setUp(self):
-        super().setUp()
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cache_path = json.loads(self.cmd(f"--repo={self.repository_location}", "rinfo", "--json"))["cache"]["path"]
-
-    def corrupt(self, file, amount=1):
-        with open(file, "r+b") as fd:
-            fd.seek(-amount, io.SEEK_END)
-            corrupted = bytes(255 - c for c in fd.read(amount))
-            fd.seek(-amount, io.SEEK_END)
-            fd.write(corrupted)
-
-    def test_cache_chunks(self):
-        self.corrupt(os.path.join(self.cache_path, "chunks"))
-
-        if self.FORK_DEFAULT:
-            out = self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=2)
-            assert "failed integrity check" in out
-        else:
-            with pytest.raises(FileIntegrityError):
-                self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def test_cache_files(self):
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.corrupt(os.path.join(self.cache_path, "files"))
-        out = self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        # borg warns about the corrupt files cache, but then continues without files cache.
-        assert "files cache is corrupted" in out
-
-    def test_chunks_archive(self):
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        # Find ID of test1 so we can corrupt it later :)
-        target_id = self.cmd(f"--repo={self.repository_location}", "rlist", "--format={id}{NL}").strip()
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input")
-
-        # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}", "rinfo", "--json")
-
-        chunks_archive = os.path.join(self.cache_path, "chunks.archive.d")
-        assert len(os.listdir(chunks_archive)) == 4  # two archives, one chunks cache and one .integrity file each
-
-        self.corrupt(os.path.join(chunks_archive, target_id + ".compact"))
-
-        # Trigger cache sync by changing the manifest ID in the cache config
-        config_path = os.path.join(self.cache_path, "config")
-        config = ConfigParser(interpolation=None)
-        config.read(config_path)
-        config.set("cache", "manifest", bin_to_hex(bytes(32)))
-        with open(config_path, "w") as fd:
-            config.write(fd)
-
-        # Cache sync notices corrupted archive chunks, but automatically recovers.
-        out = self.cmd(f"--repo={self.repository_location}", "create", "-v", "test3", "input", exit_code=1)
-        assert "Reading cached archive chunk index for test1" in out
-        assert "Cached archive chunk index of test1 is corrupted" in out
-        assert "Fetching and building archive index for test1" in out
-
-    def test_old_version_interfered(self):
-        # Modify the main manifest ID without touching the manifest ID in the integrity section.
-        # This happens if a version without integrity checking modifies the cache.
-        config_path = os.path.join(self.cache_path, "config")
-        config = ConfigParser(interpolation=None)
-        config.read(config_path)
-        config.set("cache", "manifest", bin_to_hex(bytes(32)))
-        with open(config_path, "w") as fd:
-            config.write(fd)
-
-        out = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "Cache integrity data not available: old Borg version modified the cache." in out
+from . import cmd, create_src_archive, create_test_files, RK_ENCRYPTION
+
+
+def test_check_corrupted_repository(archiver):
+    repo_location, tmpdir = archiver.repository_location, archiver.tmpdir
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "check")
+
+    name = sorted(os.listdir(os.path.join(tmpdir, "repository", "data", "0")), reverse=True)[1]
+    with open(os.path.join(tmpdir, "repository", "data", "0", name), "r+b") as fd:
+        fd.seek(100)
+        fd.write(b"XXXX")
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+
+
+@pytest.fixture()
+def corrupted_archiver(archiver):
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    archiver.cache_path = json.loads(cmd(archiver, f"--repo={repo_location}", "rinfo", "--json"))["cache"]["path"]
+    yield archiver
+
+
+def corrupt(file, amount=1):
+    with open(file, "r+b") as fd:
+        fd.seek(-amount, io.SEEK_END)
+        corrupted = bytes(255 - c for c in fd.read(amount))
+        fd.seek(-amount, io.SEEK_END)
+        fd.write(corrupted)
+
+
+def test_cache_chunks(corrupted_archiver):
+    repo_location, cache_path = corrupted_archiver.repository_location, corrupted_archiver.cache_path
+    corrupt(os.path.join(cache_path, "chunks"))
+    if corrupted_archiver.FORK_DEFAULT:
+        out = cmd(corrupted_archiver, f"--repo={repo_location}", "rinfo", exit_code=2)
+        assert "failed integrity check" in out
+    else:
+        with pytest.raises(FileIntegrityError):
+            cmd(corrupted_archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_cache_files(corrupted_archiver):
+    repo_location, cache_path = corrupted_archiver.repository_location, corrupted_archiver.cache_path
+    cmd(corrupted_archiver, f"--repo={repo_location}", "create", "test", "input")
+    corrupt(os.path.join(cache_path, "files"))
+    out = cmd(corrupted_archiver, f"--repo={repo_location}", "create", "test1", "input")
+    # borg warns about the corrupt files cache, but then continues without files cache.
+    assert "files cache is corrupted" in out
+
+
+def test_chunks_archive(corrupted_archiver):
+    repo_location, cache_path = corrupted_archiver.repository_location, corrupted_archiver.cache_path
+    cmd(corrupted_archiver, f"--repo={repo_location}", "create", "test1", "input")
+    # Find ID of test1, so we can corrupt it later :)
+    target_id = cmd(corrupted_archiver, f"--repo={repo_location}", "rlist", "--format={id}{NL}").strip()
+    cmd(corrupted_archiver, f"--repo={repo_location}", "create", "test2", "input")
+
+    # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d
+    cmd(corrupted_archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    cmd(corrupted_archiver, f"--repo={repo_location}", "rinfo", "--json")
+
+    chunks_archive = os.path.join(cache_path, "chunks.archive.d")
+    assert len(os.listdir(chunks_archive)) == 4  # two archives, one chunks cache and one .integrity file each
+
+    corrupt(os.path.join(chunks_archive, target_id + ".compact"))
+
+    # Trigger cache sync by changing the manifest ID in the cache config
+    config_path = os.path.join(cache_path, "config")
+    config = ConfigParser(interpolation=None)
+    config.read(config_path)
+    config.set("cache", "manifest", bin_to_hex(bytes(32)))
+    with open(config_path, "w") as fd:
+        config.write(fd)
+
+    # Cache sync notices corrupted archive chunks, but automatically recovers.
+    out = cmd(corrupted_archiver, f"--repo={repo_location}", "create", "-v", "test3", "input", exit_code=1)
+    assert "Reading cached archive chunk index for test1" in out
+    assert "Cached archive chunk index of test1 is corrupted" in out
+    assert "Fetching and building archive index for test1" in out
+
+
+def test_old_version_interfered(corrupted_archiver):
+    # Modify the main manifest ID without touching the manifest ID in the integrity section.
+    # This happens if a version without integrity checking modifies the cache.
+    repo_location, cache_path = corrupted_archiver.repository_location, corrupted_archiver.cache_path
+    config_path = os.path.join(cache_path, "config")
+    config = ConfigParser(interpolation=None)
+    config.read(config_path)
+    config.set("cache", "manifest", bin_to_hex(bytes(32)))
+    with open(config_path, "w") as fd:
+        config.write(fd)
+    out = cmd(corrupted_archiver, f"--repo={repo_location}", "rinfo")
+    assert "Cache integrity data not available: old Borg version modified the cache." in out

+ 1090 - 948
src/borg/testsuite/archiver/create_cmd.py

@@ -7,7 +7,6 @@ import socket
 import stat
 import subprocess
 import time
-import unittest
 
 import pytest
 
@@ -27,958 +26,1101 @@ from .. import (
     same_ts_ns,
     is_root,
 )
+
 from . import (
-    ArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RemoteArchiverTestCaseBase,
-    RK_ENCRYPTION,
-    BORG_EXES,
+    cmd,
+    create_test_files,
+    assert_dirs_equal,
+    create_regular_file,
     requires_hardlinks,
+    _create_test_caches,
+    _create_test_tagged,
+    _create_test_keep_tagged,
+    _assert_test_caches,
+    _assert_test_tagged,
+    _assert_test_keep_tagged,
+    RK_ENCRYPTION,
 )
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_basic_functionality(self):
-        have_root = self.create_test_files()
-        # fork required to test show-rc output
-        output = self.cmd(
-            f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION, "--show-version", "--show-rc", fork=True
-        )
-        self.assert_in("borgbackup version", output)
-        self.assert_in("terminating with success status, rc 0", output)
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude-nodump", "test", "input")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--exclude-nodump", "--stats", "test.2", "input"
-        )
-        self.assert_in("Archive name: test.2", output)
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        list_output = self.cmd(f"--repo={self.repository_location}", "rlist", "--short")
-        self.assert_in("test", list_output)
-        self.assert_in("test.2", list_output)
-        expected = [
-            "input",
-            "input/bdev",
-            "input/cdev",
-            "input/dir2",
-            "input/dir2/file2",
-            "input/empty",
-            "input/file1",
-            "input/flagfile",
-        ]
-        if are_fifos_supported():
-            expected.append("input/fifo1")
-        if are_symlinks_supported():
-            expected.append("input/link1")
-        if are_hardlinks_supported():
-            expected.append("input/hardlink")
-        if not have_root:
-            # we could not create these device files without (fake)root
-            expected.remove("input/bdev")
-            expected.remove("input/cdev")
-        if has_lchflags:
-            # remove the file we did not backup, so input and output become equal
-            expected.remove("input/flagfile")  # this file is UF_NODUMP
-            os.remove(os.path.join("input", "flagfile"))
-        list_output = self.cmd(f"--repo={self.repository_location}", "list", "test", "--short")
-        for name in expected:
-            self.assert_in(name, list_output)
-        self.assert_dirs_equal("input", "output/input")
-        info_output = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-        item_count = 5 if has_lchflags else 6  # one file is UF_NODUMP
-        self.assert_in("Number of files: %d" % item_count, info_output)
-        shutil.rmtree(self.cache_path)
-        info_output2 = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-
-        def filter(output):
-            # filter for interesting "info" output, ignore cache rebuilding related stuff
-            prefixes = ["Name:", "Fingerprint:", "Number of files:", "This archive:", "All archives:", "Chunk index:"]
-            result = []
-            for line in output.splitlines():
-                for prefix in prefixes:
-                    if line.startswith(prefix):
-                        result.append(line)
-            return "\n".join(result)
-
-        # the interesting parts of info_output2 and info_output should be same
-        self.assert_equal(filter(info_output), filter(info_output2))
-
-    @pytest.mark.skipif(is_win32, reason="still broken on windows")
-    def test_archived_paths(self):
-        # As borg comes from the POSIX (Linux, UNIX) world, a lot of stuff assumes path separators
-        # to be slashes "/", e.g.: in archived items, for pattern matching.
-        # To make our lives easier and to support cross-platform extraction we always use slashes.
-        # Similarly, archived paths are expected to be full, but relative (have no leading slash).
-        full_path = os.path.abspath(os.path.join(self.input_path, "test"))
-        # remove windows drive letter, if any:
-        posix_path = full_path[2:] if full_path[1] == ":" else full_path
-        # only needed on windows in case there are backslashes:
-        posix_path = posix_path.replace("\\", "/")
-        # no leading slash in borg archives:
-        archived_path = posix_path.lstrip("/")
-        self.create_regular_file("test")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", full_path)
-        # "input" directory is recursed into, "input/test" is discovered and joined by borg's recursion.
-        # full_path was directly given as a cli argument and should end up as archive_path in the borg archive.
-        expected_paths = sorted(["input", "input/test", archived_path])
-        # check path in archived items:
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--short")
-        assert expected_paths == sorted([path for path in archive_list.splitlines() if path])
-        # check path in archived items (json):
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        assert expected_paths == sorted([json.loads(line)["path"] for line in archive_list.splitlines() if line])
-
-    @requires_hardlinks
-    def test_create_duplicate_root(self):
-        # setup for #5603
-        path_a = os.path.join(self.input_path, "a")
-        path_b = os.path.join(self.input_path, "b")
-        os.mkdir(path_a)
-        os.mkdir(path_b)
-        hl_a = os.path.join(path_a, "hardlink")
-        hl_b = os.path.join(path_b, "hardlink")
-        self.create_regular_file(hl_a, contents=b"123456")
-        os.link(hl_a, hl_b)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "input")  # give input twice!
-        # test if created archive has 'input' contents twice:
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        # we have all fs items exactly once!
-        assert sorted(paths) == ["input", "input/a", "input/a/hardlink", "input/b", "input/b/hardlink"]
-
-    @pytest.mark.skipif(is_win32, reason="unix sockets not available on windows")
-    def test_unix_socket(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        try:
-            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            sock.bind(os.path.join(self.input_path, "unix-socket"))
-        except PermissionError as err:
-            if err.errno == errno.EPERM:
-                pytest.skip("unix sockets disabled or not supported")
-            elif err.errno == errno.EACCES:
-                pytest.skip("permission denied to create unix sockets")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        sock.close()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert not os.path.exists("input/unix-socket")
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    @pytest.mark.skipif(
-        not is_birthtime_fully_supported(), reason="cannot properly setup and execute test without birthtime"
+def pytest_generate_tests(metafunc):
+    # Generates tests that run on local and remote repos, as well as against the borg binary.
+    if "archivers" in metafunc.fixturenames:
+        metafunc.parametrize("archivers", ["archiver", "remote_archiver", "binary_archiver"])
+
+
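
# Aside: the pytest_generate_tests hook above is equivalent to hand-decorating
# every test in this module, i.e.:
#
#     @pytest.mark.parametrize("archivers", ["archiver", "remote_archiver", "binary_archiver"])
#     def test_something(archivers, request):
#         archiver = request.getfixturevalue(archivers)
#
# the hook simply applies that parametrization to each test that declares an
# "archivers" argument, so the decorator is not repeated per test.
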
+def test_basic_functionality(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("test_basic_functionality seems incompatible with fakeroot and/or the binary.")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    have_root = create_test_files(input_path)
+    # fork required to test show-rc output
+    output = cmd(
+        archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION, "--show-version", "--show-rc", fork=True
+    )
+    assert "borgbackup version" in output
+    assert "terminating with success status, rc 0" in output
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "--stats", "test.2", "input")
+    assert "Archive name: test.2" in output
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    list_output = cmd(archiver, f"--repo={repo_location}", "rlist", "--short")
+    assert "test" in list_output
+    assert "test.2" in list_output
+    expected = [
+        "input",
+        "input/bdev",
+        "input/cdev",
+        "input/dir2",
+        "input/dir2/file2",
+        "input/empty",
+        "input/file1",
+        "input/flagfile",
+    ]
+    if are_fifos_supported():
+        expected.append("input/fifo1")
+    if are_symlinks_supported():
+        expected.append("input/link1")
+    if are_hardlinks_supported():
+        expected.append("input/hardlink")
+    if not have_root:
+        # we could not create these device files without (fake)root
+        expected.remove("input/bdev")
+        expected.remove("input/cdev")
+    if has_lchflags:
+        # remove the file we did not back up, so input and output become equal
+        expected.remove("input/flagfile")  # this file is UF_NODUMP
+        os.remove(os.path.join("input", "flagfile"))
+    list_output = cmd(archiver, f"--repo={repo_location}", "list", "test", "--short")
+    for name in expected:
+        assert name in list_output
+    assert_dirs_equal("input", "output/input")
+    info_output = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+    item_count = 5 if has_lchflags else 6  # one file is UF_NODUMP
+    assert "Number of files: %d" % item_count in info_output
+    shutil.rmtree(archiver.cache_path)
+    info_output2 = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+
+    def filter(output):
+        # filter for interesting "info" output, ignore cache rebuilding related stuff
+        prefixes = ["Name:", "Fingerprint:", "Number of files:", "This archive:", "All archives:", "Chunk index:"]
+        result = []
+        for line in output.splitlines():
+            for prefix in prefixes:
+                if line.startswith(prefix):
+                    result.append(line)
+        return "\n".join(result)
+
+    # the interesting parts of info_output2 and info_output should be same
+    assert filter(info_output) == filter(info_output2)
+
+
+@pytest.mark.skipif(is_win32, reason="still broken on windows")
+def test_archived_paths(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    # As borg comes from the POSIX (Linux, UNIX) world, a lot of stuff assumes path separators
+    # to be slashes "/", e.g.: in archived items, for pattern matching.
+    # To make our lives easier and to support cross-platform extraction we always use slashes.
+    # Similarly, archived paths are expected to be full, but relative (have no leading slash).
+    full_path = os.path.abspath(os.path.join(archiver.input_path, "test"))
+    # remove windows drive letter, if any:
+    posix_path = full_path[2:] if full_path[1] == ":" else full_path
+    # only needed on Windows in case there are backslashes:
+    posix_path = posix_path.replace("\\", "/")
+    # no leading slash in borg archives:
+    archived_path = posix_path.lstrip("/")
+    create_regular_file(archiver.input_path, "test")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", full_path)
+    # "input" directory is recursed into, "input/test" is discovered and joined by borg's recursion.
+    # full_path was directly given as a cli argument and should end up as archive_path in the borg archive.
+    expected_paths = sorted(["input", "input/test", archived_path])
+    # check path in archived items:
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--short")
+    assert expected_paths == sorted([path for path in archive_list.splitlines() if path])
+    # check path in archived items (json):
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    assert expected_paths == sorted([json.loads(line)["path"] for line in archive_list.splitlines() if line])
+
+
+@requires_hardlinks
+def test_create_duplicate_root(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    # setup for #5603
+    path_a = os.path.join(input_path, "a")
+    path_b = os.path.join(input_path, "b")
+    os.mkdir(path_a)
+    os.mkdir(path_b)
+    hl_a = os.path.join(path_a, "hardlink")
+    hl_b = os.path.join(path_b, "hardlink")
+    create_regular_file(input_path, hl_a, contents=b"123456")
+    os.link(hl_a, hl_b)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "input")  # give input twice!
+    # test if created archive has 'input' contents twice:
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    # we have all fs items exactly once!
+    assert sorted(paths) == ["input", "input/a", "input/a/hardlink", "input/b", "input/b/hardlink"]
+
+
+# FAILS: > sock.bind(os.path.join(input_path, "unix-socket"))
+# E           OSError: AF_UNIX path too long
+@pytest.mark.skipif(is_win32, reason="unix sockets not available on windows")
+def test_unix_socket(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    try:
+        print(f"\nINPUT PATH: {input_path}")
+        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        sock.bind(os.path.join(input_path, "unix-socket"))
+    except PermissionError as err:
+        if err.errno == errno.EPERM:
+            pytest.skip("unix sockets disabled or not supported")
+        elif err.errno == errno.EACCES:
+            pytest.skip("permission denied to create unix sockets")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    sock.close()
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        assert not os.path.exists("input/unix-socket")
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
+@pytest.mark.skipif(
+    not is_birthtime_fully_supported(), reason="cannot properly setup and execute test without birth time"
+)
+def test_nobirthtime(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    birthtime, mtime, atime = 946598400, 946684800, 946771200
+    os.utime("input/file1", (atime, birthtime))
+    os.utime("input/file1", (atime, mtime))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--nobirthtime")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    sti = os.stat("input/file1")
+    sto = os.stat("output/input/file1")
+    assert same_ts_ns(sti.st_birthtime * 1e9, birthtime * 1e9)
+    assert same_ts_ns(sto.st_birthtime * 1e9, mtime * 1e9)
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+    assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
+
+
+def test_create_stdin(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = b"\x00foo\n\nbar\n   \n"
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "-", input=input_data)
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["size"] == len(input_data)
+    assert item["path"] == "stdin"
+    extracted_data = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--stdout", binary_output=True)
+    assert extracted_data == input_data
+
+
+def test_create_stdin_checkpointing(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = b"X" * (chunk_size * 2 - 1)  # one full and one partial chunk
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        f"--chunker-params=fixed,{chunk_size}",
+        f"--checkpoint-volume={chunk_size}",
+        "test",
+        "-",
+        input=input_data,
+    )
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # verify that there are no part files in final archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "stdin.borg_part" not in out
+    # verify full file
+    out = cmd(archiver, f"--repo={repo_location}", "extract", "test", "stdin", "--stdout", binary_output=True)
+    assert out == input_data
+
+
+def test_create_erroneous_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
+    create_regular_file(input_path, os.path.join(input_path, "file1"), size=chunk_size * 2)
+    create_regular_file(input_path, os.path.join(input_path, "file2"), size=chunk_size * 2)
+    create_regular_file(input_path, os.path.join(input_path, "file3"), size=chunk_size * 2)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    flist = "".join(f"input/file{n}\n" for n in range(1, 4))
+    out = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        f"--chunker-params=fail,{chunk_size},rrrEEErrrr",
+        "--paths-from-stdin",
+        "--list",
+        "test",
+        input=flist.encode(),
+        exit_code=0,
+    )
+    assert "retry: 3 of " in out
+    assert "E input/file2" not in out  # we managed to read it in the 3rd retry (after 3 failed reads)
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # check files in created archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/file1" in out
+    assert "input/file2" in out
+    assert "input/file3" in out
+
+
+@pytest.mark.skipif(is_root(), reason="test must not be run as (fake)root")
+def test_create_no_permission_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    file_path = os.path.join(input_path, "file")
+    create_regular_file(input_path, file_path + "1", size=1000)
+    create_regular_file(input_path, file_path + "2", size=1000)
+    create_regular_file(input_path, file_path + "3", size=1000)
+    # revoke read permissions on file2 for everybody, including us:
+    if is_win32:
+        subprocess.run(["icacls.exe", file_path + "2", "/deny", "everyone:(R)"])
+    else:
+        # note: this will NOT take away read permissions for root
+        os.chmod(file_path + "2", 0o000)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    flist = "".join(f"input/file{n}\n" for n in range(1, 4))
+    out = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-stdin",
+        "--list",
+        "test",
+        input=flist.encode(),
+        exit_code=1,  # WARNING status: could not back up file2.
+    )
+    assert "retry: 1 of " not in out  # retries were NOT attempted!
+    assert "E input/file2" in out  # no permissions!
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # check files in created archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/file1" in out
+    assert "input/file2" not in out  # it skipped file2
+    assert "input/file3" in out
+
+
+def test_sanitized_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
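+    # "./a//path" should be stored sanitized: the leading "./" is dropped and "//" collapsed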
+    cmd(archiver, f"--repo={repo_location}", "create", "--stdin-name", "./a//path", "test", "-", input=b"")
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["path"] == "a/path"
+
+
+def test_dotdot_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
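+    # a ".." element in --stdin-name must be rejected (path traversal guard), hence exit_code=2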
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--stdin-name", "foo/../bar", "test", "-", input=b"", exit_code=2
+    )
+    assert output.endswith("'..' element in path 'foo/../bar'" + os.linesep)
+
+
+def test_dot_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--stdin-name", "./", "test", "-", input=b"", exit_code=2
+    )
+    assert output.endswith("'./' is not a valid file name" + os.linesep)
+
+
+def test_create_content_from_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = "some test content"
+    name = "a/b/c"
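+    # --content-from-command archives the stdout of the command given after "--" under the --stdin-name path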
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--stdin-name",
+        name,
+        "--content-from-command",
+        "test",
+        "--",
+        "echo",
+        input_data,
+    )
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["size"] == len(input_data) + 1  # `echo` adds newline
+    assert item["path"] == name
+    extracted_data = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--stdout")
+    assert extracted_data == input_data + "\n"
+
+
+def test_create_content_from_command_with_failed_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--content-from-command",
+        "test",
+        "--",
+        "sh",
+        "-c",
+        "exit 73;",
+        exit_code=2,
    )
-    def test_nobirthtime(self):
-        self.create_test_files()
-        birthtime, mtime, atime = 946598400, 946684800, 946771200
-        os.utime("input/file1", (atime, birthtime))
-        os.utime("input/file1", (atime, mtime))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--nobirthtime")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        sti = os.stat("input/file1")
-        sto = os.stat("output/input/file1")
-        assert same_ts_ns(sti.st_birthtime * 1e9, birthtime * 1e9)
-        assert same_ts_ns(sto.st_birthtime * 1e9, mtime * 1e9)
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-        assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
-
-    def test_create_stdin(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = b"\x00foo\n\nbar\n   \n"
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "-", input=input_data)
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["size"] == len(input_data)
-        assert item["path"] == "stdin"
-        extracted_data = self.cmd(
-            f"--repo={self.repository_location}", "extract", "test", "--stdout", binary_output=True
-        )
-        assert extracted_data == input_data
-
-    def test_create_stdin_checkpointing(self):
-        chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = b"X" * (chunk_size * 2 - 1)  # one full and one partial chunk
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            f"--chunker-params=fixed,{chunk_size}",
-            f"--checkpoint-volume={chunk_size}",
-            "test",
-            "-",
-            input=input_data,
-        )
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # verify that there are no part files in final archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "stdin.borg_part" not in out
-        # verify full file
-        out = self.cmd(f"--repo={self.repository_location}", "extract", "test", "stdin", "--stdout", binary_output=True)
-        assert out == input_data
-
-    def test_create_erroneous_file(self):
-        chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
-        self.create_regular_file(os.path.join(self.input_path, "file1"), size=chunk_size * 2)
-        self.create_regular_file(os.path.join(self.input_path, "file2"), size=chunk_size * 2)
-        self.create_regular_file(os.path.join(self.input_path, "file3"), size=chunk_size * 2)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        flist = "".join(f"input/file{n}\n" for n in range(1, 4))
-        out = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            f"--chunker-params=fail,{chunk_size},rrrEEErrrr",
-            "--paths-from-stdin",
-            "--list",
-            "test",
-            input=flist.encode(),
-            exit_code=0,
-        )
-        assert "retry: 3 of " in out
-        assert "E input/file2" not in out  # we managed to read it in the 3rd retry (after 3 failed reads)
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # check files in created archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/file1" in out
-        assert "input/file2" in out
-        assert "input/file3" in out
-
-    @pytest.mark.skipif(is_root(), reason="test must not be run as (fake)root")
-    def test_create_no_permission_file(self):
-        file_path = os.path.join(self.input_path, "file")
-        self.create_regular_file(file_path + "1", size=1000)
-        self.create_regular_file(file_path + "2", size=1000)
-        self.create_regular_file(file_path + "3", size=1000)
-        # revoke read permissions on file2 for everybody, including us:
-        if is_win32:
-            subprocess.run(["icacls.exe", file_path + "2", "/deny", "everyone:(R)"])
-        else:
-            # note: this will NOT take away read permissions for root
-            os.chmod(file_path + "2", 0o000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        flist = "".join(f"input/file{n}\n" for n in range(1, 4))
-        out = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-stdin",
-            "--list",
-            "test",
-            input=flist.encode(),
-            exit_code=1,  # WARNING status: could not back up file2.
-        )
-        assert "retry: 1 of " not in out  # retries were NOT attempted!
-        assert "E input/file2" in out  # no permissions!
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # check files in created archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/file1" in out
-        assert "input/file2" not in out  # it skipped file2
-        assert "input/file3" in out
-
-    def test_sanitized_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--stdin-name", "./a//path", "test", "-", input=b"")
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["path"] == "a/path"
-
-    def test_dotdot_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--stdin-name",
-            "foo/../bar",
-            "test",
-            "-",
-            input=b"",
-            exit_code=2,
-        )
-        assert output.endswith("'..' element in path 'foo/../bar'" + os.linesep)
-
-    def test_dot_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--stdin-name", "./", "test", "-", input=b"", exit_code=2
-        )
-        assert output.endswith("'./' is not a valid file name" + os.linesep)
-
-    def test_create_content_from_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = "some test content"
-        name = "a/b/c"
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--stdin-name",
-            name,
-            "--content-from-command",
-            "test",
-            "--",
-            "echo",
-            input_data,
-        )
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["size"] == len(input_data) + 1  # `echo` adds newline
-        assert item["path"] == name
-        extracted_data = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--stdout")
-        assert extracted_data == input_data + "\n"
-
-    def test_create_content_from_command_with_failed_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--content-from-command",
-            "test",
-            "--",
-            "sh",
-            "-c",
-            "exit 73;",
-            exit_code=2,
-        )
-        assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
-        archive_list = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
-        assert archive_list["archives"] == []
-
-    def test_create_content_from_command_missing_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "--content-from-command", exit_code=2)
-        assert output.endswith("No command given." + os.linesep)
-
-    def test_create_paths_from_stdin(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir1/file2", size=1024 * 80)
-        self.create_regular_file("dir1/file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-
-        input_data = b"input/file1\0input/dir1\0input/file4"
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "--paths-from-stdin",
-            "--paths-delimiter",
-            "\\0",
-            input=input_data,
-        )
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        assert paths == ["input/file1", "input/dir1", "input/file4"]
-
-    def test_create_paths_from_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-
-        input_data = "input/file1\ninput/file2\ninput/file3"
-        if is_win32:
-            with open("filenames.cmd", "w") as script:
-                for filename in input_data.splitlines():
-                    script.write(f"@echo {filename}\n")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-command",
-            "test",
-            "--",
-            "filenames.cmd" if is_win32 else "echo",
-            input_data,
-        )
-
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        assert paths == ["input/file1", "input/file2", "input/file3"]
-
-    def test_create_paths_from_command_with_failed_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-command",
-            "test",
-            "--",
-            "sh",
-            "-c",
-            "exit 73;",
-            exit_code=2,
-        )
-        assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
-        archive_list = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
-        assert archive_list["archives"] == []
-
-    def test_create_paths_from_command_missing_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "--paths-from-command", exit_code=2)
-        assert output.endswith("No command given." + os.linesep)
-
-    def test_create_without_root(self):
-        """test create without a root"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", exit_code=2)
-
-    def test_create_pattern_root(self):
-        """test create with only a root pattern"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "-v", "--list", "--pattern=R input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-
-    def test_create_pattern(self):
-        """test file patterns during create"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file_important", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--pattern=+input/file_important",
-            "--pattern=-input/file*",
-            "test",
-            "input",
-        )
-        self.assert_in("A input/file_important", output)
-        self.assert_in("- input/file1", output)
-        self.assert_in("- input/file2", output)
-
-    def test_create_pattern_file(self):
-        """test file patterns during create"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("otherfile", size=1024 * 80)
-        self.create_regular_file("file_important", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--pattern=-input/otherfile",
-            "--patterns-from=" + self.patterns_file_path,
-            "test",
-            "input",
-        )
-        self.assert_in("A input/file_important", output)
-        self.assert_in("- input/file1", output)
-        self.assert_in("- input/file2", output)
-        self.assert_in("- input/otherfile", output)
-
-    def test_create_pattern_exclude_folder_but_recurse(self):
-        """test when patterns exclude a parent folder, but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/b\n- input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        self.create_regular_file("y/foo_y", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--patterns-from=" + self.patterns_file_path2,
-            "test",
-            "input",
-        )
-        self.assert_in("- input/x/a/foo_a", output)
-        self.assert_in("A input/x/b/foo_b", output)
-        self.assert_in("A input/y/foo_y", output)
-
-    def test_create_pattern_exclude_folder_no_recurse(self):
-        """test when patterns exclude a parent folder and, but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/b\n! input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        self.create_regular_file("y/foo_y", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--patterns-from=" + self.patterns_file_path2,
-            "test",
-            "input",
-        )
-        self.assert_not_in("input/x/a/foo_a", output)
-        self.assert_not_in("input/x/a", output)
-        self.assert_in("A input/y/foo_y", output)
-
-    def test_create_pattern_intermediate_folders_first(self):
-        """test that intermediate folders appear first when patterns exclude a parent folder but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/a\n+ input/x/b\n- input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        with changedir("input"):
-            self.cmd(
-                f"--repo={self.repository_location}",
-                "create",
-                "--patterns-from=" + self.patterns_file_path2,
-                "test",
-                ".",
-            )
-
-        # list the archive and verify that the "intermediate" folders appear before
-        # their contents
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test", "--format", "{type} {path}{NL}")
-        out_list = out.splitlines()
-
-        self.assert_in("d x/a", out_list)
-        self.assert_in("d x/b", out_list)
-
-        assert out_list.index("d x/a") < out_list.index("- x/a/foo_a")
-        assert out_list.index("d x/b") < out_list.index("- x/b/foo_b")
-
-    def test_create_no_cache_sync(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        create_json = json.loads(
-            self.cmd(
-                f"--repo={self.repository_location}", "create", "--no-cache-sync", "--json", "--error", "test", "input"
-            )
-        )  # ignore experimental warning
-        info_json = json.loads(self.cmd(f"--repo={self.repository_location}", "info", "-a", "test", "--json"))
-        create_stats = create_json["cache"]["stats"]
-        info_stats = info_json["cache"]["stats"]
-        assert create_stats == info_stats
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}", "create", "--no-cache-sync", "test2", "input")
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-        self.cmd(f"--repo={self.repository_location}", "check")
-
-    def test_create_archivename_with_placeholder(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        ts = "1999-12-31T23:59:59"
-        name_given = "test-{now}"  # placeholder in archive name gets replaced by borg
-        name_expected = f"test-{ts}"  # placeholder in f-string gets replaced by python
-        self.cmd(f"--repo={self.repository_location}", "create", f"--timestamp={ts}", name_given, "input")
-        list_output = self.cmd(f"--repo={self.repository_location}", "rlist", "--short")
-        assert name_expected in list_output
-
-    def test_exclude_caches(self):
-        self._create_test_caches()
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--exclude-caches")
-        self._assert_test_caches()
-
-    def test_exclude_tagged(self):
-        self._create_test_tagged()
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "input",
-            "--exclude-if-present",
-            ".NOBACKUP",
-            "--exclude-if-present",
-            "00-NOBACKUP",
-        )
-        self._assert_test_tagged()
-
-    def test_exclude_keep_tagged(self):
-        self._create_test_keep_tagged()
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "input",
-            "--exclude-if-present",
-            ".NOBACKUP1",
-            "--exclude-if-present",
-            ".NOBACKUP2",
-            "--exclude-caches",
-            "--keep-exclude-tags",
-        )
-        self._assert_test_keep_tagged()
-
-    def test_path_sanitation(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("dir1/dir2/file", size=1024 * 80)
-        with changedir("input/dir1/dir2"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test", "../../../input/dir1/../dir1/dir2/..")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        self.assert_not_in("..", output)
-        self.assert_in(" input/dir1/dir2/file", output)
-
-    def test_exclude_sanitation(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        with changedir("input"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test1", ".", "--exclude=file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test1")
-        self.assert_equal(sorted(os.listdir("output")), ["file2"])
-        with changedir("input"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test2", ".", "--exclude=./file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test2")
-        self.assert_equal(sorted(os.listdir("output")), ["file2"])
-        self.cmd(f"--repo={self.repository_location}", "create", "test3", "input", "--exclude=input/./file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test3")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file2"])
-
-    def test_repeated_files(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "input")
-
-    @pytest.mark.skipif("BORG_TESTS_IGNORE_MODES" in os.environ, reason="modes unreliable")
-    @pytest.mark.skipif(is_win32, reason="modes unavailable on Windows")
-    def test_umask(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        mode = os.stat(self.repository_path).st_mode
-        self.assertEqual(stat.S_IMODE(mode), 0o700)
-
-    def test_create_dry_run(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--dry-run", "test", "input")
-        # Make sure no archive has been created
-        with Repository(self.repository_path) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-        self.assert_equal(len(manifest.archives), 0)
-
-    def test_progress_on(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test4", "input", "--progress")
-        self.assert_in("\r", output)
-
-    def test_progress_off(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test5", "input")
-        self.assert_not_in("\r", output)
-
-    def test_file_status(self):
-        """test that various file status show expected results
-
-        clearly incomplete: only tests for the weird "unchanged" status for now"""
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "test", "input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-        # should find first file as unmodified
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "test2", "input")
-        self.assert_in("U input/file1", output)
-        # this is expected, although surprising, for why, see:
-        # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file
-        self.assert_in("A input/file2", output)
-
-    @pytest.mark.skipif(
-        is_win32, reason="ctime attribute is file creation time on Windows"
-    )  # see https://docs.python.org/3/library/os.html#os.stat_result.st_ctime
-    def test_file_status_cs_cache_mode(self):
-        """test that a changed file with faked "previous" mtime still gets backed up in ctime,size cache_mode"""
-        self.create_regular_file("file1", contents=b"123")
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "test1", "input", "--list", "--files-cache=ctime,size"
-        )
-        # modify file1, but cheat with the mtime (and atime) and also keep same size:
-        st = os.stat("input/file1")
-        self.create_regular_file("file1", contents=b"321")
-        os.utime("input/file1", ns=(st.st_atime_ns, st.st_mtime_ns))
-        # this mode uses ctime for change detection, so it should find file1 as modified
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "test2", "input", "--list", "--files-cache=ctime,size"
-        )
-        self.assert_in("M input/file1", output)
-
-    def test_file_status_ms_cache_mode(self):
-        """test that a chmod'ed file with no content changes does not get chunked again in mtime,size cache_mode"""
-        self.create_regular_file("file1", size=10)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=mtime,size", "test1", "input"
-        )
-        # change mode of file1, no content change:
-        st = os.stat("input/file1")
-        os.chmod("input/file1", st.st_mode ^ stat.S_IRWXO)  # this triggers a ctime change, but mtime is unchanged
-        # this mode uses mtime for change detection, so it should find file1 as unmodified
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=mtime,size", "test2", "input"
-        )
-        self.assert_in("U input/file1", output)
-
-    def test_file_status_rc_cache_mode(self):
-        """test that files get rechunked unconditionally in rechunk,ctime cache mode"""
-        self.create_regular_file("file1", size=10)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=rechunk,ctime", "test1", "input"
-        )
-        # no changes here, but this mode rechunks unconditionally
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=rechunk,ctime", "test2", "input"
-        )
-        self.assert_in("A input/file1", output)
-
-    def test_file_status_excluded(self):
-        """test that excluded paths are listed"""
-
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        if has_lchflags:
-            self.create_regular_file("file3", size=1024 * 80)
-            platform.set_flags(os.path.join(self.input_path, "file3"), stat.UF_NODUMP)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "--exclude-nodump", "test", "input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-        if has_lchflags:
-            self.assert_in("- input/file3", output)
-        # should find second file as excluded
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test1",
-            "input",
-            "--list",
-            "--exclude-nodump",
-            "--exclude",
-            "*/file2",
-        )
-        self.assert_in("U input/file1", output)
-        self.assert_in("- input/file2", output)
-        if has_lchflags:
-            self.assert_in("- input/file3", output)
-
-    def test_file_status_counters(self):
-        """Test file status counters in the stats of `borg create --stats`"""
-
-        def to_dict(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            borg_create_output = {
-                key: int(value)
-                for key, value in borg_create_output
-                if key in ("Added files", "Unchanged files", "Modified files")
-            }
-            return borg_create_output
-
-        # Test case set up: create a repository
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # Archive an empty dir
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        result = to_dict(result)
-        assert result["Added files"] == 0
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 0
-        # Archive a dir with two added files
-        self.create_regular_file("testfile1", contents=b"test1")
-        time.sleep(1.0 if is_darwin else 0.01)  # testfile2 must have newer timestamps than testfile1
-        self.create_regular_file("testfile2", contents=b"test2")
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive2", self.input_path)
-        result = to_dict(result)
-        assert result["Added files"] == 2
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 0
-        # Archive a dir with 1 unmodified file and 1 modified
-        self.create_regular_file("testfile1", contents=b"new data")
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive3", self.input_path)
-        result = to_dict(result)
-        # Should process testfile2 as added because of
-        # https://borgbackup.readthedocs.io/en/stable/faq.html#i-am-seeing-a-added-status-for-an-unchanged-file
-        assert result["Added files"] == 1
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 1
-
-    def test_create_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        create_info = json.loads(self.cmd(f"--repo={self.repository_location}", "create", "--json", "test", "input"))
-        # The usual keys
-        assert "encryption" in create_info
-        assert "repository" in create_info
-        assert "cache" in create_info
-        assert "last_modified" in create_info["repository"]
-
-        archive = create_info["archive"]
-        assert archive["name"] == "test"
-        assert isinstance(archive["command_line"], str)
-        assert isinstance(archive["duration"], float)
-        assert len(archive["id"]) == 64
-        assert "stats" in archive
-
-    def test_create_topical(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # no listing by default
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.assert_not_in("file1", output)
-        # shouldn't be listed even if unchanged
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-        self.assert_not_in("file1", output)
-        # should list the file as unchanged
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test1", "input", "--list", "--filter=U")
-        self.assert_in("file1", output)
-        # should *not* list the file as changed
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test2", "input", "--list", "--filter=AM")
-        self.assert_not_in("file1", output)
-        # change the file
-        self.create_regular_file("file1", size=1024 * 100)
-        # should list the file as changed
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test3", "input", "--list", "--filter=AM")
-        self.assert_in("file1", output)
-
-    @pytest.mark.skipif(not are_fifos_supported() or is_cygwin, reason="FIFOs not supported, hangs on cygwin")
-    def test_create_read_special_symlink(self):
-        from threading import Thread
-
-        def fifo_feeder(fifo_fn, data):
-            fd = os.open(fifo_fn, os.O_WRONLY)
-            try:
-                os.write(fd, data)
-            finally:
-                os.close(fd)
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        data = b"foobar" * 1000
-
-        fifo_fn = os.path.join(self.input_path, "fifo")
-        link_fn = os.path.join(self.input_path, "link_fifo")
-        os.mkfifo(fifo_fn)
-        os.symlink(fifo_fn, link_fn)
-
-        t = Thread(target=fifo_feeder, args=(fifo_fn, data))
-        t.start()
+    assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
+    archive_list = json.loads(cmd(archiver, f"--repo={repo_location}", "rlist", "--json"))
+    assert archive_list["archives"] == []
+
+
+def test_create_content_from_command_missing_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "--content-from-command", exit_code=2)
+    assert output.endswith("No command given." + os.linesep)
+
+
+def test_create_paths_from_stdin(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir1/file2", size=1024 * 80)
+    create_regular_file(input_path, "dir1/file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+
+    input_data = b"input/file1\0input/dir1\0input/file4"
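+    # NUL-separated path list; "--paths-delimiter \0" below makes borg split on NUL instead of newline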
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "--paths-from-stdin",
+        "--paths-delimiter",
+        "\\0",
+        input=input_data,
+    )
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    assert paths == ["input/file1", "input/dir1", "input/file4"]
+
+
+def test_create_paths_from_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+
+    input_data = "input/file1\ninput/file2\ninput/file3"
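+    # Windows has no standalone "echo" executable, so write a small .cmd script that prints the paths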
+    if is_win32:
+        with open("filenames.cmd", "w") as script:
+            for filename in input_data.splitlines():
+                script.write(f"@echo {filename}\n")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-command",
+        "test",
+        "--",
+        "filenames.cmd" if is_win32 else "echo",
+        input_data,
+    )
+
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    assert paths == ["input/file1", "input/file2", "input/file3"]
+
+
+def test_create_paths_from_command_with_failed_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-command",
+        "test",
+        "--",
+        "sh",
+        "-c",
+        "exit 73;",
+        exit_code=2,
+    )
+    assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
+    archive_list = json.loads(cmd(archiver, f"--repo={repo_location}", "rlist", "--json"))
+    assert archive_list["archives"] == []
+
+
+def test_create_paths_from_command_missing_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "--paths-from-command", exit_code=2)
+    assert output.endswith("No command given." + os.linesep)
+
+
+def test_create_without_root(archivers, request):
+    """test create without a root"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", exit_code=2)
+
+
+def test_create_pattern_root(archivers, request):
+    """test create with only a root pattern"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "-v", "--list", "--pattern=R input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+
+
+def test_create_pattern(archivers, request):
+    """test file patterns during create"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file_important", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--pattern=+input/file_important",
+        "--pattern=-input/file*",
+        "test",
+        "input",
+    )
+    assert "A input/file_important" in output
+    assert "- input/file1" in output
+    assert "- input/file2" in output
+
+
+def test_create_pattern_file(archivers, request):
+    """test file patterns during create"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "otherfile", size=1024 * 80)
+    create_regular_file(input_path, "file_important", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--pattern=-input/otherfile",
+        "--patterns-from=" + archiver.patterns_file_path,
+        "test",
+        "input",
+    )
+    assert "A input/file_important" in output
+    assert "- input/file1" in output
+    assert "- input/file2" in output
+    assert "- input/otherfile" in output
+
+
+def test_create_pattern_exclude_folder_but_recurse(archivers, request):
+    """test when patterns exclude a parent folder, but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/b\n- input/x*\n")
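+    # "-" excludes matches but still recurses into excluded dirs, so "+ input/x/b" can re-include the child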
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    create_regular_file(input_path, "y/foo_y", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--patterns-from=" + patterns_file_path2,
+        "test",
+        "input",
+    )
+    assert "- input/x/a/foo_a" in output
+    assert "A input/x/b/foo_b" in output
+    assert "A input/y/foo_y" in output
+
+
+def test_create_pattern_exclude_folder_no_recurse(archivers, request):
+    """test when patterns exclude a parent folder, but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/b\n! input/x*\n")
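+    # "!" excludes without recursing, so input/x/a is never visited; "+ input/x/b" matches first and keeps x/b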
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    create_regular_file(input_path, "y/foo_y", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--patterns-from=" + patterns_file_path2,
+        "test",
+        "input",
+    )
+    assert "input/x/a/foo_a" not in output
+    assert "input/x/a" not in output
+    assert "A input/y/foo_y" in output
+
+
+def test_create_pattern_intermediate_folders_first(archivers, request):
+    """test that intermediate folders appear first when patterns exclude a parent folder but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/a\n+ input/x/b\n- input/x*\n")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "--patterns-from=" + patterns_file_path2, "test", ".")
+
+    # list the archive and verify that the "intermediate" folders appear before
+    # their contents
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test", "--format", "{type} {path}{NL}")
+    out_list = out.splitlines()
+
+    assert "d x/a" in out_list
+    assert "d x/b" in out_list
+    assert out_list.index("d x/a") < out_list.index("- x/a/foo_a")
+    assert out_list.index("d x/b") < out_list.index("- x/b/foo_b")
+
+
+def test_create_no_cache_sync(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
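+    # drop the client-side cache so the following --no-cache-sync create has to work without it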
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    create_json = json.loads(
+        cmd(archiver, f"--repo={repo_location}", "create", "--no-cache-sync", "--json", "--error", "test", "input")
+    )  # ignore experimental warning
+    info_json = json.loads(cmd(archiver, f"--repo={repo_location}", "info", "-a", "test", "--json"))
+    create_stats = create_json["cache"]["stats"]
+    info_stats = info_json["cache"]["stats"]
+    assert create_stats == info_stats
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    cmd(archiver, f"--repo={repo_location}", "create", "--no-cache-sync", "test2", "input")
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+    cmd(archiver, f"--repo={repo_location}", "check")
+
+
+def test_create_archivename_with_placeholder(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    ts = "1999-12-31T23:59:59"
+    name_given = "test-{now}"  # placeholder in archive name gets replaced by borg
+    name_expected = f"test-{ts}"  # placeholder in f-string gets replaced by python
+    cmd(archiver, f"--repo={repo_location}", "create", f"--timestamp={ts}", name_given, "input")
+    list_output = cmd(archiver, f"--repo={repo_location}", "rlist", "--short")
+    assert name_expected in list_output
+
+
+def test_exclude_caches(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    _create_test_caches(archiver)
+    cmd(archiver, f"--repo={archiver.repository_location}", "create", "test", "input", "--exclude-caches")
+    _assert_test_caches(archiver)
+
+
+def test_exclude_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_tagged(archiver)
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "input",
+        "--exclude-if-present",
+        ".NOBACKUP",
+        "--exclude-if-present",
+        "00-NOBACKUP",
+    )
+    _assert_test_tagged(archiver)
+
+
+def test_exclude_keep_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_keep_tagged(archiver)
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "input",
+        "--exclude-if-present",
+        ".NOBACKUP1",
+        "--exclude-if-present",
+        ".NOBACKUP2",
+        "--exclude-caches",
+        "--keep-exclude-tags",
+    )
+    _assert_test_keep_tagged(archiver)
+
+
+def test_path_sanitation(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "dir1/dir2/file", size=1024 * 80)
+    with changedir("input/dir1/dir2"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test", "../../../input/dir1/../dir1/dir2/..")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert ".." not in output
+    assert " input/dir1/dir2/file" in output
+
+
+def test_exclude_sanitation(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test1", ".", "--exclude=file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test1")
+    assert sorted(os.listdir("output")) == ["file2"]
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test2", ".", "--exclude=./file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test2")
+    assert sorted(os.listdir("output")) == ["file2"]
+    cmd(archiver, f"--repo={repo_location}", "create", "test3", "input", "--exclude=input/./file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test3")
+    assert sorted(os.listdir("output/input")) == ["file2"]
+
+
+def test_repeated_files(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "input")
+
+
+@pytest.mark.skipif("BORG_TESTS_IGNORE_MODES" in os.environ, reason="modes unreliable")
+@pytest.mark.skipif(is_win32, reason="modes unavailable on Windows")
+def test_umask(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
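+    # regardless of the process umask, the repository dir should end up owner-only (0700)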
+    mode = os.stat(repo_path).st_mode
+    assert stat.S_IMODE(mode) == 0o700
+
+
+def test_create_dry_run(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--dry-run", "test", "input")
+    # Make sure no archive has been created
+    with Repository(repo_path) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+    assert len(manifest.archives) == 0
+
+
+def test_progress_on(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test4", "input", "--progress")
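+    # the progress display redraws its status line in place using carriage returns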
+    assert "\r" in output
+
+
+def test_progress_off(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test5", "input")
+    assert "\r" not in output
+
+
+def test_file_status(archivers, request):
+    """test that various file status show expected results
+    clearly incomplete: only tests for the weird "unchanged" status for now"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "test", "input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+    # should find first file as unmodified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "test2", "input")
+    assert "U input/file1" in output
+    # although surprising, this is expected. For why, see:
+    # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file
+    assert "A input/file2" in output
+
+
+@pytest.mark.skipif(
+    is_win32, reason="ctime attribute is file creation time on Windows"
+)  # see https://docs.python.org/3/library/os.html#os.stat_result.st_ctime
+def test_file_status_cs_cache_mode(archivers, request):
+    """test that a changed file with faked "previous" mtime still gets backed up in ctime,size cache_mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", contents=b"123")
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test1", "input", "--list", "--files-cache=ctime,size")
+    # modify file1, but cheat with the mtime (and atime) and also keep same size:
+    st = os.stat("input/file1")
+    create_regular_file(input_path, "file1", contents=b"321")
+    os.utime("input/file1", ns=(st.st_atime_ns, st.st_mtime_ns))
+    # this mode uses ctime for change detection, so it should find file1 as modified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--list", "--files-cache=ctime,size")
+    assert "M input/file1" in output
+
+
+def test_file_status_ms_cache_mode(archivers, request):
+    """test that a chmod'ed file with no content changes does not get chunked again in mtime,size cache_mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=10)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=mtime,size", "test1", "input")
+    # change mode of file1, no content change:
+    st = os.stat("input/file1")
+    os.chmod("input/file1", st.st_mode ^ stat.S_IRWXO)  # this triggers a ctime change, but mtime is unchanged
+    # this mode uses mtime for change detection, so it should find file1 as unmodified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=mtime,size", "test2", "input")
+    assert "U input/file1" in output
+
+
+def test_file_status_rc_cache_mode(archivers, request):
+    """test that files get rechunked unconditionally in rechunk,ctime cache mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=10)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=rechunk,ctime", "test1", "input"
+    )
+    # no changes here, but this mode rechunks unconditionally
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=rechunk,ctime", "test2", "input"
+    )
+    assert "A input/file1" in output
+
+
+def test_file_status_excluded(archivers, request):
+    """test that excluded paths are listed"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    if has_lchflags:
+        create_regular_file(input_path, "file3", size=1024 * 80)
+        platform.set_flags(os.path.join(input_path, "file3"), stat.UF_NODUMP)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--exclude-nodump", "test", "input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+    if has_lchflags:
+        assert "- input/file3" in output
+    # should find second file as excluded
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test1",
+        "input",
+        "--list",
+        "--exclude-nodump",
+        "--exclude",
+        "*/file2",
+    )
+    assert "U input/file1" in output
+    assert "- input/file2" in output
+    if has_lchflags:
+        assert "- input/file3" in output
+
+
+def test_file_status_counters(archivers, request):
+    """Test file status counters in the stats of `borg create --stats`"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
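+    # helper: parse the "borg create --stats" text output into {counter: int} for Added/Unchanged/Modified files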
+    def to_dict(borg_create_output):
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        borg_create_output = {
+            key: int(value)
+            for key, value in borg_create_output
+            if key in ("Added files", "Unchanged files", "Modified files")
+        }
+        return borg_create_output
+
+    # Test case setup: create a repository
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # Archive an empty dir
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    result = to_dict(result)
+    assert result["Added files"] == 0
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 0
+    # Archive a dir with two added files
+    create_regular_file(input_path, "testfile1", contents=b"test1")
+    time.sleep(1.0 if is_darwin else 0.01)  # testfile2 must have newer timestamps than testfile1
+    create_regular_file(input_path, "testfile2", contents=b"test2")
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive2", input_path)
+    result = to_dict(result)
+    assert result["Added files"] == 2
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 0
+    # Archive a dir with 1 unmodified file and 1 modified
+    create_regular_file(input_path, "testfile1", contents=b"new data")
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive3", input_path)
+    result = to_dict(result)
+    # Should process testfile2 as added because of
+    # https://borgbackup.readthedocs.io/en/stable/faq.html#i-am-seeing-a-added-status-for-an-unchanged-file
+    assert result["Added files"] == 1
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 1
+
+
+def test_create_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_info = json.loads(cmd(archiver, f"--repo={repo_location}", "create", "--json", "test", "input"))
+    # The usual keys
+    assert "encryption" in create_info
+    assert "repository" in create_info
+    assert "cache" in create_info
+    assert "last_modified" in create_info["repository"]
+
+    archive = create_info["archive"]
+    assert archive["name"] == "test"
+    assert isinstance(archive["command_line"], str)
+    assert isinstance(archive["duration"], float)
+    assert len(archive["id"]) == 64
+    assert "stats" in archive
+
+
+def test_create_topical(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # no listing by default
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    assert "file1" not in output
+    # shouldn't be listed even if unchanged
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    assert "file1" not in output
+    # should list the file as unchanged
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test1", "input", "--list", "--filter=U")
+    assert "file1" in output
+    # should *not* list the file as changed
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--list", "--filter=AM")
+    assert "file1" not in output
+    # change the file
+    create_regular_file(input_path, "file1", size=1024 * 100)
+    # should list the file as changed
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test3", "input", "--list", "--filter=AM")
+    assert "file1" in output
+
+
+@pytest.mark.skipif(not are_fifos_supported() or is_cygwin, reason="FIFOs not supported, hangs on cygwin")
+def test_create_read_special_symlink(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    from threading import Thread
+
+    def fifo_feeder(fifo_fn, data):
+        fd = os.open(fifo_fn, os.O_WRONLY)
+        try:
+            os.write(fd, data)
+        finally:
+            os.close(fd)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    data = b"foobar" * 1000
+
+    fifo_fn = os.path.join(input_path, "fifo")
+    link_fn = os.path.join(input_path, "link_fifo")
+    os.mkfifo(fifo_fn)
+    os.symlink(fifo_fn, link_fn)
+
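+    # write the FIFO contents from a background thread, since the writer blocks until borg opens the read side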
+    t = Thread(target=fifo_feeder, args=(fifo_fn, data))
+    t.start()
+    try:
+        cmd(archiver, f"--repo={repo_location}", "create", "--read-special", "test", "input/link_fifo")
+    finally:
+        # In case `borg create` failed to open FIFO, read all data to avoid join() hanging.
+        fd = os.open(fifo_fn, os.O_RDONLY | os.O_NONBLOCK)
         try:
-            self.cmd(f"--repo={self.repository_location}", "create", "--read-special", "test", "input/link_fifo")
+            os.read(fd, len(data))
+        except OSError:
+            # fails on FreeBSD 13 with BlockingIOError
+            pass
         finally:
-            # In case `borg create` failed to open FIFO, read all data to avoid join() hanging.
-            fd = os.open(fifo_fn, os.O_RDONLY | os.O_NONBLOCK)
-            try:
-                os.read(fd, len(data))
-            except OSError:
-                # fails on FreeBSD 13 with BlockingIOError
-                pass
-            finally:
-                os.close(fd)
-            t.join()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            fifo_fn = "input/link_fifo"
-            with open(fifo_fn, "rb") as f:
-                extracted_data = f.read()
-        assert extracted_data == data
-
-    @pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
-    def test_create_read_special_broken_symlink(self):
-        os.symlink("somewhere does not exist", os.path.join(self.input_path, "link"))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--read-special", "test", "input")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/link -> somewhere does not exist" in output
-
-    def test_log_json(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        log = self.cmd(
-            f"--repo={self.repository_location}", "create", "test", "input", "--log-json", "--list", "--debug"
-        )
-        messages = {}  # type -> message, one of each kind
-        for line in log.splitlines():
-            msg = json.loads(line)
-            messages[msg["type"]] = msg
-
-        file_status = messages["file_status"]
-        assert "status" in file_status
-        assert file_status["path"].startswith("input")
-
-        log_message = messages["log_message"]
-        assert isinstance(log_message["time"], float)
-        assert log_message["levelname"] == "DEBUG"  # there should only be DEBUG messages
-        assert isinstance(log_message["message"], str)
-
-    def test_common_options(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        log = self.cmd(f"--repo={self.repository_location}", "--debug", "create", "test", "input")
-        assert "security: read previous location" in log
-
-    def test_hashing_time(self):
-        def extract_hashing_time(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            hashing_time = [line for line in borg_create_output if line[0] == "Time spent in hashing"].pop()
-            hashing_time = hashing_time[1]
-            hashing_time = float(hashing_time.removesuffix(" seconds"))
-            return hashing_time
-
-        # Test case set up: create a repository and a file
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.create_regular_file("testfile", contents=randbytes(50000000))
-        # Archive
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        hashing_time = extract_hashing_time(result)
-
-        assert hashing_time > 0.0
-
-    def test_chunking_time(self):
-        def extract_chunking_time(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            chunking_time = [line for line in borg_create_output if line[0] == "Time spent in chunking"].pop()
-            chunking_time = chunking_time[1]
-            chunking_time = float(chunking_time.removesuffix(" seconds"))
-            return chunking_time
-
-        # Test case set up: create a repository and a file
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("testfile", contents=randbytes(50000000))
-        # Archive
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        chunking_time = extract_chunking_time(result)
-
-        assert chunking_time > 0.0
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
-
-    @unittest.skip("test_basic_functionality seems incompatible with fakeroot and/or the binary.")
-    def test_basic_functionality(self):
-        pass
+            os.close(fd)
+        t.join()
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        fifo_fn = "input/link_fifo"
+        with open(fifo_fn, "rb") as f:
+            extracted_data = f.read()
+    assert extracted_data == data
+
+
+@pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
+def test_create_read_special_broken_symlink(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    os.symlink("somewhere does not exist", os.path.join(input_path, "link"))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--read-special", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/link -> somewhere does not exist" in output
+
+
+def test_log_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--log-json", "--list", "--debug")
+    messages = {}  # type -> message, one of each kind
+    for line in log.splitlines():
+        msg = json.loads(line)
+        messages[msg["type"]] = msg
+
+    file_status = messages["file_status"]
+    assert "status" in file_status
+    assert file_status["path"].startswith("input")
+
+    log_message = messages["log_message"]
+    assert isinstance(log_message["time"], float)
+    assert log_message["levelname"] == "DEBUG"  # there should only be DEBUG messages
+    assert isinstance(log_message["message"], str)
+
+
+def test_common_options(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "--debug", "create", "test", "input")
+    assert "security: read previous location" in log
+
+
+def test_hashing_time(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
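+    # helper: extract the "Time spent in hashing: X.XXX seconds" value from the --stats output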
+    def extract_hashing_time(borg_create_output):
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        hashing_time = [line for line in borg_create_output if line[0] == "Time spent in hashing"].pop()
+        hashing_time = hashing_time[1]
+        hashing_time = float(hashing_time.removesuffix(" seconds"))
+        return hashing_time
+
+    # Test case setup: create a repository and a file
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    create_regular_file(input_path, "testfile", contents=randbytes(50000000))
+    # Archive
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    hashing_time = extract_hashing_time(result)
+
+    assert hashing_time > 0.0
+
+
+def test_chunking_time(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
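+    # helper: extract the "Time spent in chunking: X.XXX seconds" value from the --stats output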
+    def extract_chunking_time(borg_create_output):
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        chunking_time = [line for line in borg_create_output if line[0] == "Time spent in chunking"].pop()
+        chunking_time = chunking_time[1]
+        chunking_time = float(chunking_time.removesuffix(" seconds"))
+        return chunking_time
+
+    # Test case setup: create a repository and a file
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "testfile", contents=randbytes(50000000))
+    # Archive
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    chunking_time = extract_chunking_time(result)
+
+    assert chunking_time > 0.0