
Merge pull request #7722 from bigtedde/archiver-tests

Archiver folder - all tests converted from unittest to pytest
TW 1 year ago
parent commit e1cd38a0df
35 changed files with 6219 additions and 5539 deletions
  1. conftest.py (+83 -1)
  2. src/borg/testsuite/archiver/__init__.py (+421 -260)
  3. src/borg/testsuite/archiver/argparsing.py (+28 -26)
  4. src/borg/testsuite/archiver/benchmark_cmd.py (+6 -6)
  5. src/borg/testsuite/archiver/bypass_lock_option.py (+132 -115)
  6. src/borg/testsuite/archiver/check_cmd.py (+310 -270)
  7. src/borg/testsuite/archiver/checks.py (+429 -376)
  8. src/borg/testsuite/archiver/config_cmd.py (+43 -45)
  9. src/borg/testsuite/archiver/corruption.py (+101 -90)
  10. src/borg/testsuite/archiver/create_cmd.py (+1094 -946)
  11. src/borg/testsuite/archiver/debug_cmds.py (+201 -176)
  12. src/borg/testsuite/archiver/delete_cmd.py (+77 -72)
  13. src/borg/testsuite/archiver/diff_cmd.py (+286 -310)
  14. src/borg/testsuite/archiver/disk_full.py (+8 -7)
  15. src/borg/testsuite/archiver/extract_cmd.py (+742 -674)
  16. src/borg/testsuite/archiver/help_cmd.py (+17 -19)
  17. src/borg/testsuite/archiver/info_cmd.py (+46 -54)
  18. src/borg/testsuite/archiver/key_cmds.py (+255 -227)
  19. src/borg/testsuite/archiver/list_cmd.py (+72 -75)
  20. src/borg/testsuite/archiver/lock_cmds.py (+14 -18)
  21. src/borg/testsuite/archiver/mount_cmds.py (+322 -320)
  22. src/borg/testsuite/archiver/patterns.py (+19 -18)
  23. src/borg/testsuite/archiver/prune_cmd.py (+213 -212)
  24. src/borg/testsuite/archiver/rcompress_cmd.py (+65 -64)
  25. src/borg/testsuite/archiver/rcreate_cmd.py (+71 -76)
  26. src/borg/testsuite/archiver/rdelete_cmd.py (+19 -24)
  27. src/borg/testsuite/archiver/recreate_cmd.py (+363 -306)
  28. src/borg/testsuite/archiver/rename_cmd.py (+24 -31)
  29. src/borg/testsuite/archiver/return_codes.py (+7 -7)
  30. src/borg/testsuite/archiver/rinfo_cmd.py (+58 -57)
  31. src/borg/testsuite/archiver/rlist_cmd.py (+123 -114)
  32. src/borg/testsuite/archiver/serve_cmd.py (+3 -0)
  33. src/borg/testsuite/archiver/tar_cmds.py (+253 -224)
  34. src/borg/testsuite/archiver/transfer_cmd.py (+289 -298)
  35. src/borg/testsuite/benchmark.py (+25 -21)

+ 83 - 1
conftest.py

@@ -1,13 +1,16 @@
 import os
+from typing import Optional
 
 import pytest
 
-# needed to get pretty assertion failures in unit tests:
+from borg.testsuite.archiver import BORG_EXES
+
 if hasattr(pytest, "register_assert_rewrite"):
     pytest.register_assert_rewrite("borg.testsuite")
 
 
 import borg.cache  # noqa: E402
+from borg.archiver import Archiver
 from borg.logger import setup_logging  # noqa: E402
 
 # Ensure that the loggers exist for all tests
@@ -73,3 +76,82 @@ class DefaultPatches:
 @pytest.fixture(autouse=True)
 def default_patches(request):
     return DefaultPatches(request)
+
+
+@pytest.fixture()
+def set_env_variables():
+    os.environ["BORG_CHECK_I_KNOW_WHAT_I_AM_DOING"] = "YES"
+    os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
+    os.environ["BORG_PASSPHRASE"] = "waytooeasyonlyfortests"
+    os.environ["BORG_SELFTEST"] = "disabled"
+
+
+class ArchiverSetup:
+    EXE: str = None  # python source based
+    FORK_DEFAULT = False
+    BORG_EXES = []
+
+    def __init__(self):
+        self.archiver = None
+        self.tmpdir: Optional[str] = None
+        self.repository_path: Optional[str] = None
+        self.repository_location: Optional[str] = None
+        self.input_path: Optional[str] = None
+        self.output_path: Optional[str] = None
+        self.keys_path: Optional[str] = None
+        self.cache_path: Optional[str] = None
+        self.exclude_file_path: Optional[str] = None
+        self.patterns_file_path: Optional[str] = None
+
+    def get_kind(self) -> str:
+        if self.repository_location.startswith("ssh://__testsuite__"):
+            return "remote"
+        elif self.EXE == "borg.exe":
+            return "binary"
+        else:
+            return "local"
+
+
+@pytest.fixture()
+def archiver(tmp_path, set_env_variables):
+    archiver = ArchiverSetup()
+    archiver.archiver = not archiver.FORK_DEFAULT and Archiver() or None
+    archiver.tmpdir = tmp_path
+    archiver.repository_path = os.fspath(tmp_path / "repository")
+    archiver.repository_location = archiver.repository_path
+    archiver.input_path = os.fspath(tmp_path / "input")
+    archiver.output_path = os.fspath(tmp_path / "output")
+    archiver.keys_path = os.fspath(tmp_path / "keys")
+    archiver.cache_path = os.fspath(tmp_path / "cache")
+    archiver.exclude_file_path = os.fspath(tmp_path / "excludes")
+    archiver.patterns_file_path = os.fspath(tmp_path / "patterns")
+    os.environ["BORG_KEYS_DIR"] = archiver.keys_path
+    os.environ["BORG_CACHE_DIR"] = archiver.cache_path
+    os.mkdir(archiver.input_path)
+    os.chmod(archiver.input_path, 0o777)  # avoid troubles with fakeroot / FUSE
+    os.mkdir(archiver.output_path)
+    os.mkdir(archiver.keys_path)
+    os.mkdir(archiver.cache_path)
+    with open(archiver.exclude_file_path, "wb") as fd:
+        fd.write(b"input/file2\n# A comment line, then a blank line\n\n")
+    with open(archiver.patterns_file_path, "wb") as fd:
+        fd.write(b"+input/file_important\n- input/file*\n# A comment line, then a blank line\n\n")
+    old_wd = os.getcwd()
+    os.chdir(archiver.tmpdir)
+    yield archiver
+    os.chdir(old_wd)
+
+
+@pytest.fixture()
+def remote_archiver(archiver):
+    archiver.repository_location = "ssh://__testsuite__" + str(archiver.repository_path)
+    yield archiver
+
+
+@pytest.fixture()
+def binary_archiver(archiver):
+    if "binary" not in BORG_EXES:
+        pytest.skip("No borg.exe binary available")
+    archiver.EXE = "borg.exe"
+    archiver.FORK_DEFAULT = True
+    yield archiver
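
For context, a minimal sketch of how a converted test would consume these new fixtures; the test function below is hypothetical and not part of this commit (cmd and RK_ENCRYPTION come from src/borg/testsuite/archiver/__init__.py, shown next):

import os

from borg.testsuite.archiver import cmd, RK_ENCRYPTION


def test_repo_is_created(archiver):  # hypothetical test, not from this commit
    # The archiver fixture provides per-test tmp paths and, via set_env_variables,
    # the BORG_* environment variables; cmd() asserts the exit code (default 0).
    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
    assert os.path.exists(archiver.repository_path)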

+ 421 - 260
src/borg/testsuite/archiver/__init__.py

@@ -1,13 +1,15 @@
 import errno
+import filecmp
 import io
 import os
-import shutil
+import re
 import stat
 import subprocess
 import sys
 import tempfile
 import time
 from configparser import ConfigParser
+from contextlib import contextmanager
 from datetime import datetime
 from io import BytesIO, StringIO
 
@@ -18,17 +20,19 @@ from ...archive import Archive
 from ...archiver import Archiver, PURE_PYTHON_MSGPACK_WARNING
 from ...cache import Cache
 from ...constants import *  # NOQA
-from ...helpers import Location
+from ...helpers import Location, umount
 from ...helpers import EXIT_SUCCESS
 from ...helpers import bin_to_hex
 from ...logger import flush_logging
 from ...manifest import Manifest
+from ...platform import get_flags
 from ...remote import RemoteRepository
 from ...repository import Repository
-from .. import has_lchflags
-from .. import BaseTestCase, changedir, environment_variable
+from .. import has_lchflags, is_utime_fully_supported, have_fuse_mtime_ns, st_mtime_ns_round, no_selinux
+from .. import changedir
 from .. import are_symlinks_supported, are_hardlinks_supported, are_fifos_supported
 from ..platform import is_win32
+from ...xattr import get_all
 
 RK_ENCRYPTION = "--encryption=repokey-aes-ocb"
 KF_ENCRYPTION = "--encryption=keyfile-chacha20-poly1305"
@@ -101,7 +105,7 @@ except FileNotFoundError:
 
 
 @pytest.fixture(params=BORG_EXES)
-def cmd(request):
+def cmd_fixture(request):
     if request.param == "python":
         exe = None
     elif request.param == "binary":
@@ -115,265 +119,422 @@ def cmd(request):
     return exec_fn
 
 
+def generate_archiver_tests(metafunc, kinds: str):
+    # Generate tests for different scenarios: local repository, remote repository, and using the borg binary.
+    archivers = []
+    for kind in kinds.split(","):
+        if kind == "local":
+            archivers.append("archiver")
+        elif kind == "remote":
+            archivers.append("remote_archiver")
+        elif kind == "binary":
+            archivers.append("binary_archiver")
+        else:
+            raise ValueError(f"Invalid archiver: Expected local, remote, or binary, received {kind}.")
+
+    if "archivers" in metafunc.fixturenames:
+        metafunc.parametrize("archivers", archivers)
+
+
 def checkts(ts):
     # check if the timestamp is in the expected format
     assert datetime.strptime(ts, ISO_FORMAT + "%z")  # must not raise
 
 
-class ArchiverTestCaseBase(BaseTestCase):
-    EXE: str = None  # python source based
-    FORK_DEFAULT = False
-    prefix = ""
-
-    def setUp(self):
-        os.environ["BORG_CHECK_I_KNOW_WHAT_I_AM_DOING"] = "YES"
-        os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
-        os.environ["BORG_PASSPHRASE"] = "waytooeasyonlyfortests"
-        os.environ["BORG_SELFTEST"] = "disabled"
-        self.archiver = not self.FORK_DEFAULT and Archiver() or None
-        self.tmpdir = tempfile.mkdtemp()
-        self.repository_path = os.path.join(self.tmpdir, "repository")
-        self.repository_location = self.prefix + self.repository_path
-        self.input_path = os.path.join(self.tmpdir, "input")
-        self.output_path = os.path.join(self.tmpdir, "output")
-        self.keys_path = os.path.join(self.tmpdir, "keys")
-        self.cache_path = os.path.join(self.tmpdir, "cache")
-        self.exclude_file_path = os.path.join(self.tmpdir, "excludes")
-        self.patterns_file_path = os.path.join(self.tmpdir, "patterns")
-        os.environ["BORG_KEYS_DIR"] = self.keys_path
-        os.environ["BORG_CACHE_DIR"] = self.cache_path
-        os.mkdir(self.input_path)
-        os.chmod(self.input_path, 0o777)  # avoid troubles with fakeroot / FUSE
-        os.mkdir(self.output_path)
-        os.mkdir(self.keys_path)
-        os.mkdir(self.cache_path)
-        with open(self.exclude_file_path, "wb") as fd:
-            fd.write(b"input/file2\n# A comment line, then a blank line\n\n")
-        with open(self.patterns_file_path, "wb") as fd:
-            fd.write(b"+input/file_important\n- input/file*\n# A comment line, then a blank line\n\n")
-        self._old_wd = os.getcwd()
-        os.chdir(self.tmpdir)
-
-    def tearDown(self):
-        os.chdir(self._old_wd)
-        # note: ignore_errors=True as workaround for issue #862
-        shutil.rmtree(self.tmpdir, ignore_errors=True)
-
-    def cmd(self, *args, **kw):
-        exit_code = kw.pop("exit_code", 0)
-        fork = kw.pop("fork", None)
-        binary_output = kw.get("binary_output", False)
-        if fork is None:
-            fork = self.FORK_DEFAULT
-        ret, output = exec_cmd(*args, fork=fork, exe=self.EXE, archiver=self.archiver, **kw)
-        if ret != exit_code:
-            print(output)
-        self.assert_equal(ret, exit_code)
-        # if tests are run with the pure-python msgpack, there will be warnings about
-        # this in the output, which would make a lot of tests fail.
-        pp_msg = PURE_PYTHON_MSGPACK_WARNING.encode() if binary_output else PURE_PYTHON_MSGPACK_WARNING
-        empty = b"" if binary_output else ""
-        output = empty.join(line for line in output.splitlines(keepends=True) if pp_msg not in line)
-        return output
-
-    def create_src_archive(self, name, ts=None):
-        if ts:
-            self.cmd(
-                f"--repo={self.repository_location}", "create", "--compression=lz4", f"--timestamp={ts}", name, src_dir
-            )
-        else:
-            self.cmd(f"--repo={self.repository_location}", "create", "--compression=lz4", name, src_dir)
-
-    def open_archive(self, name):
-        repository = Repository(self.repository_path, exclusive=True)
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            archive = Archive(manifest, name)
-        return archive, repository
-
-    def open_repository(self):
-        return Repository(self.repository_path, exclusive=True)
-
-    def create_regular_file(self, name, size=0, contents=None):
-        assert not (size != 0 and contents and len(contents) != size), "size and contents do not match"
-        filename = os.path.join(self.input_path, name)
-        if not os.path.exists(os.path.dirname(filename)):
-            os.makedirs(os.path.dirname(filename))
-        with open(filename, "wb") as fd:
-            if contents is None:
-                contents = b"X" * size
-            fd.write(contents)
-
-    def create_test_files(self, create_hardlinks=True):
-        """Create a minimal test case including all supported file types"""
-        # File
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("flagfile", size=1024)
-        # Directory
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        # File mode
-        os.chmod("input/file1", 0o4755)
-        # Hard link
-        if are_hardlinks_supported() and create_hardlinks:
-            os.link(os.path.join(self.input_path, "file1"), os.path.join(self.input_path, "hardlink"))
-        # Symlink
-        if are_symlinks_supported():
-            os.symlink("somewhere", os.path.join(self.input_path, "link1"))
-        self.create_regular_file("fusexattr", size=1)
-        if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path):
-            fn = os.fsencode(os.path.join(self.input_path, "fusexattr"))
-            # ironically, due to the way how fakeroot works, comparing FUSE file xattrs to orig file xattrs
-            # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False.
-            # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file
-            # into "fakeroot space". Because the xattrs exposed by borgfs are these of an underlying file
-            # (from fakeroots point of view) they are invisible to the test process inside the fakeroot.
-            xattr.setxattr(fn, b"user.foo", b"bar")
-            xattr.setxattr(fn, b"user.empty", b"")
-            # XXX this always fails for me
-            # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot
-            # same for newer ubuntu and centos.
-            # if this is supported just on specific platform, platform should be checked first,
-            # so that the test setup for all tests using it does not fail here always for others.
-            # xattr.setxattr(os.path.join(self.input_path, 'link1'), b'user.foo_symlink', b'bar_symlink', follow_symlinks=False)
-        # FIFO node
-        if are_fifos_supported():
-            os.mkfifo(os.path.join(self.input_path, "fifo1"))
-        if has_lchflags:
-            platform.set_flags(os.path.join(self.input_path, "flagfile"), stat.UF_NODUMP)
-
-        if is_win32:
+def cmd(archiver, *args, **kw):
+    exit_code = kw.pop("exit_code", 0)
+    fork = kw.pop("fork", None)
+    binary_output = kw.get("binary_output", False)
+    if fork is None:
+        fork = archiver.FORK_DEFAULT
+    ret, output = exec_cmd(*args, archiver=archiver.archiver, fork=fork, exe=archiver.EXE, **kw)
+    if ret != exit_code:
+        print(output)
+    assert ret == exit_code
+    # if tests are run with the pure-python msgpack, there will be warnings about
+    # this in the output, which would make a lot of tests fail.
+    pp_msg = PURE_PYTHON_MSGPACK_WARNING.encode() if binary_output else PURE_PYTHON_MSGPACK_WARNING
+    empty = b"" if binary_output else ""
+    output = empty.join(line for line in output.splitlines(keepends=True) if pp_msg not in line)
+    return output
+
+
+def create_src_archive(archiver, name, ts=None):
+    repo_location, source_dir = archiver.repository_location, src_dir
+    if ts:
+        cmd(archiver, f"--repo={repo_location}", "create", "--compression=lz4", f"--timestamp={ts}", name, source_dir)
+    else:
+        cmd(archiver, f"--repo={repo_location}", "create", "--compression=lz4", name, source_dir)
+
+
+def open_archive(repo_path, name):
+    repository = Repository(repo_path, exclusive=True)
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        archive = Archive(manifest, name)
+    return archive, repository
+
+
+def open_repository(archiver):
+    if archiver.get_kind() == "remote":
+        return RemoteRepository(Location(archiver.repository_location))
+    else:
+        return Repository(archiver.repository_path, exclusive=True)
+
+
+def create_regular_file(input_path, name, size=0, contents=None):
+    assert not (size != 0 and contents and len(contents) != size), "size and contents do not match"
+    filename = os.path.join(input_path, name)
+    if not os.path.exists(os.path.dirname(filename)):
+        os.makedirs(os.path.dirname(filename))
+    with open(filename, "wb") as fd:
+        if contents is None:
+            contents = b"X" * size
+        fd.write(contents)
+
+
+def create_test_files(input_path, create_hardlinks=True):
+    """Create a minimal test case including all supported file types"""
+    # File
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "flagfile", size=1024)
+    # Directory
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
+    # File mode
+    os.chmod("input/file1", 0o4755)
+    # Hard link
+    if are_hardlinks_supported() and create_hardlinks:
+        os.link(os.path.join(input_path, "file1"), os.path.join(input_path, "hardlink"))
+    # Symlink
+    if are_symlinks_supported():
+        os.symlink("somewhere", os.path.join(input_path, "link1"))
+    create_regular_file(input_path, "fusexattr", size=1)
+    if not xattr.XATTR_FAKEROOT and xattr.is_enabled(input_path):
+        fn = os.fsencode(os.path.join(input_path, "fusexattr"))
+        # ironically, due to the way how fakeroot works, comparing FUSE file xattrs to orig file xattrs
+        # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False.
+        # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file
+        # into "fakeroot space". Because the xattrs exposed by borgfs are these of an underlying file
+        # (from fakeroots point of view) they are invisible to the test process inside the fakeroot.
+        xattr.setxattr(fn, b"user.foo", b"bar")
+        xattr.setxattr(fn, b"user.empty", b"")
+        # XXX this always fails for me
+        # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot
+        # same for newer ubuntu and centos.
+        # if this is supported just on specific platform, platform should be checked first,
+        # so that the test setup for all tests using it does not fail here always for others.
+    # FIFO node
+    if are_fifos_supported():
+        os.mkfifo(os.path.join(input_path, "fifo1"))
+    if has_lchflags:
+        platform.set_flags(os.path.join(input_path, "flagfile"), stat.UF_NODUMP)
+
+    if is_win32:
+        have_root = False
+    else:
+        try:
+            # Block device
+            os.mknod("input/bdev", 0o600 | stat.S_IFBLK, os.makedev(10, 20))
+            # Char device
+            os.mknod("input/cdev", 0o600 | stat.S_IFCHR, os.makedev(30, 40))
+            # File owner
+            os.chown("input/file1", 100, 200)  # raises OSError invalid argument on cygwin
+            # File mode
+            os.chmod("input/dir2", 0o555)  # if we take away write perms, we need root to remove contents
+            have_root = True  # we have (fake)root
+        except PermissionError:
             have_root = False
-        else:
+        except OSError as e:
+            # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem.
+            if e.errno not in (errno.EINVAL, errno.ENOSYS):
+                raise
+            have_root = False
+    time.sleep(1)  # "empty" must have newer timestamp than other files
+    create_regular_file(input_path, "empty", size=0)
+    return have_root
+
+
+def _extract_repository_id(repo_path):
+    with Repository(repo_path) as repository:
+        return repository.id
+
+
+def _set_repository_id(repo_path, id):
+    config = ConfigParser(interpolation=None)
+    config.read(os.path.join(repo_path, "config"))
+    config.set("repository", "id", bin_to_hex(id))
+    with open(os.path.join(repo_path, "config"), "w") as fd:
+        config.write(fd)
+    with Repository(repo_path) as repository:
+        return repository.id
+
+
+def _extract_hardlinks_setup(archiver):
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    os.mkdir(os.path.join(input_path, "dir1"))
+    os.mkdir(os.path.join(input_path, "dir1/subdir"))
+
+    create_regular_file(input_path, "source", contents=b"123456")
+    os.link(os.path.join(input_path, "source"), os.path.join(input_path, "abba"))
+    os.link(os.path.join(input_path, "source"), os.path.join(input_path, "dir1/hardlink"))
+    os.link(os.path.join(input_path, "source"), os.path.join(input_path, "dir1/subdir/hardlink"))
+
+    create_regular_file(input_path, "dir1/source2")
+    os.link(os.path.join(input_path, "dir1/source2"), os.path.join(input_path, "dir1/aaaa"))
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+
+def _create_test_caches(archiver):
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "cache1/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "cache2/%s" % CACHE_TAG_NAME, contents=b"invalid signature")
+    os.mkdir("input/cache3")
+    if are_hardlinks_supported():
+        os.link("input/cache1/%s" % CACHE_TAG_NAME, "input/cache3/%s" % CACHE_TAG_NAME)
+    else:
+        create_regular_file(
+            archiver.input_path, "cache3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff"
+        )
+
+
+def _assert_test_caches(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["cache2", "file1"]
+    assert sorted(os.listdir("output/input/cache2")) == [CACHE_TAG_NAME]
+
+
+def _create_test_tagged(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(archiver.input_path, "file1", size=1024 * 80)
+    create_regular_file(archiver.input_path, "tagged1/.NOBACKUP")
+    create_regular_file(archiver.input_path, "tagged2/00-NOBACKUP")
+    create_regular_file(archiver.input_path, "tagged3/.NOBACKUP/file2", size=1024)
+
+
+def _assert_test_tagged(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["file1"]
+
+
+def _create_test_keep_tagged(archiver):
+    input_path = archiver.input_path
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file0", size=1024)
+    create_regular_file(input_path, "tagged1/.NOBACKUP1")
+    create_regular_file(input_path, "tagged1/file1", size=1024)
+    create_regular_file(input_path, "tagged2/.NOBACKUP2/subfile1", size=1024)
+    create_regular_file(input_path, "tagged2/file2", size=1024)
+    create_regular_file(input_path, "tagged3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "tagged3/file3", size=1024)
+    create_regular_file(input_path, "taggedall/.NOBACKUP1")
+    create_regular_file(input_path, "taggedall/.NOBACKUP2/subfile1", size=1024)
+    create_regular_file(input_path, "taggedall/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
+    create_regular_file(input_path, "taggedall/file4", size=1024)
+
+
+def _assert_test_keep_tagged(archiver):
+    with changedir("output"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")), ["file0", "tagged1", "tagged2", "tagged3", "taggedall"]
+    assert os.listdir("output/input/tagged1"), [".NOBACKUP1"]
+    assert os.listdir("output/input/tagged2"), [".NOBACKUP2"]
+    assert os.listdir("output/input/tagged3"), [CACHE_TAG_NAME]
+    assert sorted(os.listdir("output/input/taggedall")), [".NOBACKUP1", ".NOBACKUP2", CACHE_TAG_NAME]
+
+
+def check_cache(archiver):
+    # First run a regular borg check
+    cmd(archiver, f"--repo={archiver.repository_location}", "check")
+    # Then check that the cache on disk matches exactly what's in the repo.
+    with open_repository(archiver) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest, sync=False) as cache:
+            original_chunks = cache.chunks
+        Cache.destroy(repository)
+        with Cache(repository, manifest) as cache:
+            correct_chunks = cache.chunks
+    assert original_chunks is not correct_chunks
+    seen = set()
+    for id, (refcount, size) in correct_chunks.iteritems():
+        o_refcount, o_size = original_chunks[id]
+        assert refcount == o_refcount
+        assert size == o_size
+        seen.add(id)
+    for id, (refcount, size) in original_chunks.iteritems():
+        assert id in seen
+
+
+@contextmanager
+def assert_creates_file(path):
+    assert not os.path.exists(path), f"{path} should not exist"
+    yield
+    assert os.path.exists(path), f"{path} should exist"
+
+
+def assert_dirs_equal(dir1, dir2, **kwargs):
+    diff = filecmp.dircmp(dir1, dir2)
+    _assert_dirs_equal_cmp(diff, **kwargs)
+
+
+def assert_line_exists(lines, expected_regexpr):
+    assert any(re.search(expected_regexpr, line) for line in lines), f"no match for {expected_regexpr} in {lines}"
+
+
+def _assert_dirs_equal_cmp(diff, ignore_flags=False, ignore_xattrs=False, ignore_ns=False):
+    assert diff.left_only == []
+    assert diff.right_only == []
+    assert diff.diff_files == []
+    assert diff.funny_files == []
+    for filename in diff.common:
+        path1 = os.path.join(diff.left, filename)
+        path2 = os.path.join(diff.right, filename)
+        s1 = os.stat(path1, follow_symlinks=False)
+        s2 = os.stat(path2, follow_symlinks=False)
+        # Assume path2 is on FUSE if st_dev is different
+        fuse = s1.st_dev != s2.st_dev
+        attrs = ["st_uid", "st_gid", "st_rdev"]
+        if not fuse or not os.path.isdir(path1):
+            # dir nlink is always 1 on our FUSE filesystem
+            attrs.append("st_nlink")
+        d1 = [filename] + [getattr(s1, a) for a in attrs]
+        d2 = [filename] + [getattr(s2, a) for a in attrs]
+        d1.insert(1, oct(s1.st_mode))
+        d2.insert(1, oct(s2.st_mode))
+        if not ignore_flags:
+            d1.append(get_flags(path1, s1))
+            d2.append(get_flags(path2, s2))
+        # ignore st_rdev if file is not a block/char device, fixes #203
+        if not stat.S_ISCHR(s1.st_mode) and not stat.S_ISBLK(s1.st_mode):
+            d1[4] = None
+        if not stat.S_ISCHR(s2.st_mode) and not stat.S_ISBLK(s2.st_mode):
+            d2[4] = None
+        # If utime isn't fully supported, borg can't set mtime.
+        # Therefore, we shouldn't test it in that case.
+        if is_utime_fully_supported():
+            # Older versions of llfuse do not support ns precision properly
+            if ignore_ns:
+                d1.append(int(s1.st_mtime_ns / 1e9))
+                d2.append(int(s2.st_mtime_ns / 1e9))
+            elif fuse and not have_fuse_mtime_ns:
+                d1.append(round(s1.st_mtime_ns, -4))
+                d2.append(round(s2.st_mtime_ns, -4))
+            else:
+                d1.append(round(s1.st_mtime_ns, st_mtime_ns_round))
+                d2.append(round(s2.st_mtime_ns, st_mtime_ns_round))
+        if not ignore_xattrs:
+            d1.append(no_selinux(get_all(path1, follow_symlinks=False)))
+            d2.append(no_selinux(get_all(path2, follow_symlinks=False)))
+        assert d1 == d2
+    for sub_diff in diff.subdirs.values():
+        _assert_dirs_equal_cmp(sub_diff, ignore_flags=ignore_flags, ignore_xattrs=ignore_xattrs, ignore_ns=ignore_ns)
+
+
+@contextmanager
+def read_only(path):
+    """Some paths need to be made read-only for testing
+
+    If the tests are executed inside a fakeroot environment, the
+    changes from chmod won't affect the real permissions of that
+    folder. This issue is circumvented by temporarily disabling
+    fakeroot with `LD_PRELOAD=`.
+
+    Using chmod to remove write permissions is not enough if the
+    tests are running with root privileges. Instead, the folder is
+    rendered immutable with chattr or chflags, respectively.
+    """
+    if sys.platform.startswith("linux"):
+        cmd_immutable = 'chattr +i "%s"' % path
+        cmd_mutable = 'chattr -i "%s"' % path
+    elif sys.platform.startswith(("darwin", "freebsd", "netbsd", "openbsd")):
+        cmd_immutable = 'chflags uchg "%s"' % path
+        cmd_mutable = 'chflags nouchg "%s"' % path
+    elif sys.platform.startswith("sunos"):  # openindiana
+        cmd_immutable = 'chmod S+vimmutable "%s"' % path
+        cmd_mutable = 'chmod S-vimmutable "%s"' % path
+    else:
+        message = "Testing read-only repos is not supported on platform %s" % sys.platform
+        pytest.skip(message)
+    try:
+        os.system('LD_PRELOAD= chmod -R ugo-w "%s"' % path)
+        os.system(cmd_immutable)
+        yield
+    finally:
+        # Restore permissions to ensure clean-up doesn't fail
+        os.system(cmd_mutable)
+        os.system('LD_PRELOAD= chmod -R ugo+w "%s"' % path)
+
+
+def wait_for_mountstate(mountpoint, *, mounted, timeout=5):
+    """Wait until a path meets specified mount point status"""
+    timeout += time.time()
+    while timeout > time.time():
+        if os.path.ismount(mountpoint) == mounted:
+            return
+        time.sleep(0.1)
+    message = "Waiting for {} of {}".format("mount" if mounted else "umount", mountpoint)
+    raise TimeoutError(message)
+
+
+@contextmanager
+def fuse_mount(archiver, location, mountpoint=None, *options, fork=True, os_fork=False, **kwargs):
+    # For a successful mount, `fork = True` is required for
+    # the borg mount daemon to work properly or the tests
+    # will just freeze. Therefore, if argument `fork` is not
+    # specified, the default value is `True`, regardless of
+    # `FORK_DEFAULT`. However, leaving the possibility to run
+    # the command with `fork = False` is still necessary for
+    # testing for mount failures, for example attempting to
+    # mount a read-only repo.
+    #    `os_fork = True` is needed for testing (the absence of)
+    # a race condition of the Lock during lock migration when
+    # borg mount (local repo) is daemonizing (#4953). This is another
+    # example where we need `fork = False`, because the test case
+    # needs an OS fork, not a spawning of the fuse mount.
+    # `fork = False` is implied if `os_fork = True`.
+    if mountpoint is None:
+        mountpoint = tempfile.mkdtemp()
+    else:
+        os.mkdir(mountpoint)
+    args = [f"--repo={location}", "mount", mountpoint] + list(options)
+    if os_fork:
+        # Do not spawn, but actually (OS) fork.
+        if os.fork() == 0:
+            # The child process.
+            # Decouple from parent and fork again.
+            # Otherwise, it becomes a zombie and pretends to be alive.
+            os.setsid()
+            if os.fork() > 0:
+                os._exit(0)
+            # The grandchild process.
             try:
             try:
-                os.mknod("input/bdev", 0o600 | stat.S_IFBLK, os.makedev(10, 20))
-                # Char device
-                os.mknod("input/cdev", 0o600 | stat.S_IFCHR, os.makedev(30, 40))
-                # File owner
-                os.chown("input/file1", 100, 200)  # raises OSError invalid argument on cygwin
-                # File mode
-                os.chmod("input/dir2", 0o555)  # if we take away write perms, we need root to remove contents
-                have_root = True  # we have (fake)root
-            except PermissionError:
-                have_root = False
-            except OSError as e:
-                # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem.
-                if e.errno not in (errno.EINVAL, errno.ENOSYS):
-                    raise
-                have_root = False
-        time.sleep(1)  # "empty" must have newer timestamp than other files
-        self.create_regular_file("empty", size=0)
-        return have_root
-
-    def _extract_repository_id(self, path):
-        with Repository(self.repository_path) as repository:
-            return repository.id
-
-    def _set_repository_id(self, path, id):
-        config = ConfigParser(interpolation=None)
-        config.read(os.path.join(path, "config"))
-        config.set("repository", "id", bin_to_hex(id))
-        with open(os.path.join(path, "config"), "w") as fd:
-            config.write(fd)
-        with Repository(self.repository_path) as repository:
-            return repository.id
-
-    def _extract_hardlinks_setup(self):
-        os.mkdir(os.path.join(self.input_path, "dir1"))
-        os.mkdir(os.path.join(self.input_path, "dir1/subdir"))
-
-        self.create_regular_file("source", contents=b"123456")
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "abba"))
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "dir1/hardlink"))
-        os.link(os.path.join(self.input_path, "source"), os.path.join(self.input_path, "dir1/subdir/hardlink"))
-
-        self.create_regular_file("dir1/source2")
-        os.link(os.path.join(self.input_path, "dir1/source2"), os.path.join(self.input_path, "dir1/aaaa"))
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-    def _create_test_caches(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("cache1/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("cache2/%s" % CACHE_TAG_NAME, contents=b"invalid signature")
-        os.mkdir("input/cache3")
-        if are_hardlinks_supported():
-            os.link("input/cache1/%s" % CACHE_TAG_NAME, "input/cache3/%s" % CACHE_TAG_NAME)
-        else:
-            self.create_regular_file("cache3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-
-    def _assert_test_caches(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["cache2", "file1"])
-        self.assert_equal(sorted(os.listdir("output/input/cache2")), [CACHE_TAG_NAME])
-
-    def _create_test_tagged(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("tagged1/.NOBACKUP")
-        self.create_regular_file("tagged2/00-NOBACKUP")
-        self.create_regular_file("tagged3/.NOBACKUP/file2", size=1024)
-
-    def _assert_test_tagged(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1"])
-
-    def _create_test_keep_tagged(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file0", size=1024)
-        self.create_regular_file("tagged1/.NOBACKUP1")
-        self.create_regular_file("tagged1/file1", size=1024)
-        self.create_regular_file("tagged2/.NOBACKUP2/subfile1", size=1024)
-        self.create_regular_file("tagged2/file2", size=1024)
-        self.create_regular_file("tagged3/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("tagged3/file3", size=1024)
-        self.create_regular_file("taggedall/.NOBACKUP1")
-        self.create_regular_file("taggedall/.NOBACKUP2/subfile1", size=1024)
-        self.create_regular_file("taggedall/%s" % CACHE_TAG_NAME, contents=CACHE_TAG_CONTENTS + b" extra stuff")
-        self.create_regular_file("taggedall/file4", size=1024)
-
-    def _assert_test_keep_tagged(self):
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file0", "tagged1", "tagged2", "tagged3", "taggedall"])
-        self.assert_equal(os.listdir("output/input/tagged1"), [".NOBACKUP1"])
-        self.assert_equal(os.listdir("output/input/tagged2"), [".NOBACKUP2"])
-        self.assert_equal(os.listdir("output/input/tagged3"), [CACHE_TAG_NAME])
-        self.assert_equal(sorted(os.listdir("output/input/taggedall")), [".NOBACKUP1", ".NOBACKUP2", CACHE_TAG_NAME])
-
-    def check_cache(self):
-        # First run a regular borg check
-        self.cmd(f"--repo={self.repository_location}", "check")
-        # Then check that the cache on disk matches exactly what's in the repo.
-        with self.open_repository() as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest, sync=False) as cache:
-                original_chunks = cache.chunks
-            Cache.destroy(repository)
-            with Cache(repository, manifest) as cache:
-                correct_chunks = cache.chunks
-        assert original_chunks is not correct_chunks
-        seen = set()
-        for id, (refcount, size) in correct_chunks.iteritems():
-            o_refcount, o_size = original_chunks[id]
-            assert refcount == o_refcount
-            assert size == o_size
-            seen.add(id)
-        for id, (refcount, size) in original_chunks.iteritems():
-            assert id in seen
-
-
-class ArchiverTestCaseBinaryBase:
-    EXE = "borg.exe"
-    FORK_DEFAULT = True
-
-
-class RemoteArchiverTestCaseBase:
-    prefix = "ssh://__testsuite__"
-
-    def open_repository(self):
-        return RemoteRepository(Location(self.repository_location))
+                cmd(archiver, *args, fork=False, **kwargs)  # borg mount not spawning.
+            finally:
+                # This should never be reached, since it daemonizes,
+                # and the grandchild process exits before cmd() returns.
+                # However, just in case...
+                print("Fatal: borg mount did not daemonize properly. Force exiting.", file=sys.stderr, flush=True)
+                os._exit(0)
+    else:
+        cmd(archiver, *args, fork=fork, **kwargs)
+        if kwargs.get("exit_code", EXIT_SUCCESS) == EXIT_ERROR:
+            # If argument `exit_code = EXIT_ERROR`, then this call
+            # is testing the behavior of an unsuccessful mount, and
+            # we must not continue, as there is no mount to work
+            # with. The test itself has already failed or succeeded
+            # with the call to `cmd`, above.
+            yield
+            return
+    wait_for_mountstate(mountpoint, mounted=True)
+    yield
+    umount(mountpoint)
+    wait_for_mountstate(mountpoint, mounted=False)
+    os.rmdir(mountpoint)
+    # Give the daemon some time to exit
+    time.sleep(0.2)
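
The new generate_archiver_tests helper parametrizes a string-valued "archivers" fixture name rather than the fixtures themselves, so a converted module resolves it at runtime with request.getfixturevalue. A sketch of that pattern follows; the test function is illustrative, not from this commit:

from borg.testsuite.archiver import cmd, generate_archiver_tests, RK_ENCRYPTION


def pytest_generate_tests(metafunc):
    # Run each test against a local repo, a remote (ssh://__testsuite__) repo,
    # and the borg.exe binary, where available.
    generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA


def test_example(archivers, request):  # hypothetical test, not from this commit
    # "archivers" holds one of "archiver", "remote_archiver", or "binary_archiver";
    # resolve it to the actual ArchiverSetup instance for this parametrization.
    archiver = request.getfixturevalue(archivers)
    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)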

+ 28 - 26
src/borg/testsuite/archiver/argparsing.py

@@ -2,31 +2,33 @@ import argparse
 import pytest
 
 from ...helpers import parse_storage_quota
-from . import ArchiverTestCaseBase, Archiver, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_bad_filters(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "delete", "--first", "1", "--last", "1", fork=True, exit_code=2)
-
-    def test_highlander(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--comment", "comment 1", "test-1", __file__)
-        error_msg = "There can be only one"
-        # Default umask value is 0077
-        # Test that it works with a one time specified default or custom value
-        output_default = self.cmd(f"--repo={self.repository_location}", "--umask", "0077", "rlist")
-        assert error_msg not in output_default
-        output_custom = self.cmd(f"--repo={self.repository_location}", "--umask", "0007", "rlist")
-        assert error_msg not in output_custom
-        # Test that all combinations of custom and default values fail
-        for first, second in [("0007", "0007"), ("0007", "0077"), ("0077", "0007"), ("0077", "0077")]:
-            output_custom = self.cmd(
-                f"--repo={self.repository_location}", "--umask", first, "--umask", second, "rlist", exit_code=2
-            )
-            assert error_msg in output_custom
+from . import Archiver, RK_ENCRYPTION, cmd
+
+
+def test_bad_filters(archiver):
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "delete", "--first", "1", "--last", "1", fork=True, exit_code=2)
+
+
+def test_highlander(archiver):
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--comment", "comment 1", "test-1", __file__)
+    error_msg = "There can be only one"
+    # Default umask value is 0077
+    # Test that it works with a one time specified default or custom value
+    output_default = cmd(archiver, f"--repo={repo_location}", "--umask", "0077", "rlist")
+    assert error_msg not in output_default
+    output_custom = cmd(archiver, f"--repo={repo_location}", "--umask", "0007", "rlist")
+    assert error_msg not in output_custom
+    # Test that all combinations of custom and default values fail
+    for first, second in [("0007", "0007"), ("0007", "0077"), ("0077", "0007"), ("0077", "0077")]:
+        output_custom = cmd(
+            archiver, f"--repo={repo_location}", "--umask", first, "--umask", second, "rlist", exit_code=2
+        )
+        assert error_msg in output_custom
 
 
 def test_get_args():
@@ -184,8 +186,8 @@ class TestCommonOptions:
             "progress": False,
             "append_only": False,
             "func": 1234,
+            args_key: args_value,
         }
-        result[args_key] = args_value
 
         assert parse_vars_from_line(*line) == result
 

+ 6 - 6
src/borg/testsuite/archiver/benchmark_cmd.py

@@ -1,9 +1,9 @@
 from ...constants import *  # NOQA
 from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, RK_ENCRYPTION, environment_variable
+from .. import environment_variable
+from . import cmd, RK_ENCRYPTION
 
 
-    def test_benchmark_crud(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        with environment_variable(_BORG_BENCHMARK_CRUD_TEST="YES"):
-            self.cmd(f"--repo={self.repository_location}", "benchmark", "crud", self.input_path)
+def test_benchmark_crud(archiver):
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    with environment_variable(_BORG_BENCHMARK_CRUD_TEST="YES"):
+        cmd(archiver, f"--repo={archiver.repository_location}", "benchmark", "crud", archiver.input_path)

+ 132 - 115
src/borg/testsuite/archiver/bypass_lock_option.py

@@ -1,5 +1,3 @@
-import unittest
-
 import pytest
 
 from ...constants import *  # NOQA
@@ -7,117 +5,136 @@ from ...helpers import EXIT_ERROR
 from ...locking import LockFailed
 from ...remote import RemoteRepository
 from .. import llfuse
-from . import ArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_readonly_check(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "check", "--verify-data")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", "--bypass-lock")
-
-    def test_readonly_diff(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("a")
-        self.create_src_archive("b")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "diff", "a", "b", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "diff", "a", "b")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "diff", "a", "b", "--bypass-lock")
-
-    def test_readonly_export_tar(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "export-tar", "test", "test.tar", "--bypass-lock")
-
-    def test_readonly_extract(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "extract", "test")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--bypass-lock")
-
-    def test_readonly_info(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "rinfo")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "rinfo", "--bypass-lock")
-
-    def test_readonly_list(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "rlist", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    self.cmd(f"--repo={self.repository_location}", "rlist")
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            self.cmd(f"--repo={self.repository_location}", "rlist", "--bypass-lock")
-
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_readonly_mount(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        with self.read_only(self.repository_path):
-            # verify that command normally doesn't work with read-only repo
-            if self.FORK_DEFAULT:
-                with self.fuse_mount(self.repository_location, exit_code=EXIT_ERROR):
-                    pass
-            else:
-                with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
-                    # self.fuse_mount always assumes fork=True, so for this test we have to set fork=False manually
-                    with self.fuse_mount(self.repository_location, fork=False):
-                        pass
-                if isinstance(excinfo.value, RemoteRepository.RPCError):
-                    assert excinfo.value.exception_class == "LockFailed"
-            # verify that command works with read-only repo when using --bypass-lock
-            with self.fuse_mount(self.repository_location, None, "--bypass-lock"):
+from . import cmd, create_src_archive, RK_ENCRYPTION, read_only, fuse_mount
+
+
+def test_readonly_check(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "check", "--verify-data", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "check", "--verify-data")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "check", "--verify-data", "--bypass-lock")
+
+
+def test_readonly_diff(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "a")
+    create_src_archive(archiver, "b")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "diff", "a", "b", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "diff", "a", "b")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "diff", "a", "b", "--bypass-lock")
+
+
+def test_readonly_export_tar(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "test.tar", "--bypass-lock")
+
+
+def test_readonly_extract(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "extract", "test")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--bypass-lock")
+
+
+def test_readonly_info(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "rinfo", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "rinfo")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "rinfo", "--bypass-lock")
+
+
+def test_readonly_list(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "rlist", exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                cmd(archiver, f"--repo={repo_location}", "rlist")
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        cmd(archiver, f"--repo={repo_location}", "rlist", "--bypass-lock")
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_readonly_mount(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+
+    with read_only(repo_path):
+        # verify that command normally doesn't work with read-only repo
+        if archiver.FORK_DEFAULT:
+            with fuse_mount(archiver, repo_location, exit_code=EXIT_ERROR):
                 pass
+        else:
+            with pytest.raises((LockFailed, RemoteRepository.RPCError)) as excinfo:
+                # fuse_mount always assumes fork=True, so for this test we have to set fork=False manually
+                with fuse_mount(archiver, repo_location, fork=False):
+                    pass
+            if isinstance(excinfo.value, RemoteRepository.RPCError):
+                assert excinfo.value.exception_class == "LockFailed"
+        # verify that command works with read-only repo when using --bypass-lock
+        with fuse_mount(archiver, repo_location, None, "--bypass-lock"):
+            pass

+ 310 - 270
src/borg/testsuite/archiver/check_cmd.py

@@ -1,302 +1,342 @@
 import shutil
-import unittest
 from unittest.mock import patch
 
+import pytest
+
 from ...archive import ChunkBuffer
 from ...constants import *  # NOQA
 from ...helpers import bin_to_hex
 from ...helpers import msgpack
 from ...manifest import Manifest
 from ...repository import Repository
 from ...repository import Repository
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-from . import src_file
-
-
-class ArchiverCheckTestCase(ArchiverTestCaseBase):
-    def setUp(self):
-        super().setUp()
-        with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-            self.create_src_archive("archive1")
-            self.create_src_archive("archive2")
-
-    def test_check_usage(self):
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--progress", exit_code=0)
-        self.assert_in("Starting repository check", output)
-        self.assert_in("Starting archive consistency check", output)
-        self.assert_in("Checking segments", output)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repository-only", exit_code=0)
-        self.assert_in("Starting repository check", output)
-        self.assert_not_in("Starting archive consistency check", output)
-        self.assert_not_in("Checking segments", output)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--archives-only", exit_code=0)
-        self.assert_not_in("Starting repository check", output)
-        self.assert_in("Starting archive consistency check", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "check",
-            "-v",
-            "--archives-only",
-            "--match-archives=archive2",
-            exit_code=0,
-        )
-        self.assert_not_in("archive1", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--first=1", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_not_in("archive2", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--last=1", exit_code=0
-        )
-        self.assert_not_in("archive1", output)
-        self.assert_in("archive2", output)
-
-    def test_date_matching(self):
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        earliest_ts = "2022-11-20T23:59:59"
-        ts_in_between = "2022-12-18T23:59:59"
-        self.create_src_archive("archive1", ts=earliest_ts)
-        self.create_src_archive("archive2", ts=ts_in_between)
-        self.create_src_archive("archive3")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--oldest=23e", exit_code=2
-        )
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--oldest=1m", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
+from . import cmd, src_file, create_src_archive, open_archive, generate_archiver_tests, RK_ENCRYPTION
 
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--newest=1m", exit_code=0
-        )
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive2", output)
-        self.assert_not_in("archive1", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--newer=1d", exit_code=0
-        )
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive1", output)
-        self.assert_not_in("archive2", output)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--older=1d", exit_code=0
-        )
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
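+# runs each test in this module against local, remote and binary archiver setups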
 
-        # check for output when timespan older than earliest archive is given. Issue #1711
-        output = self.cmd(
-            f"--repo={self.repository_location}", "check", "-v", "--archives-only", "--older=9999m", exit_code=0
-        )
-        for archive in ("archive1", "archive2", "archive3"):
-            self.assert_not_in(archive, output)
 
-    def test_missing_file_chunk(self):
-        archive, repository = self.open_archive("archive1")
+def check_cmd_setup(archiver):
+    with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
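+        # shrink the chunk buffer so each archive's metadata is split across many small chunks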
+        cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+        create_src_archive(archiver, "archive1")
+        create_src_archive(archiver, "archive2")
+
+
+def test_check_usage(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    check_cmd_setup(archiver)
+    repo_location = archiver.repository_location
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--progress", exit_code=0)
+    assert "Starting repository check" in output
+    assert "Starting archive consistency check" in output
+    assert "Checking segments" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repository-only", exit_code=0)
+    assert "Starting repository check" in output
+    assert "Starting archive consistency check" not in output
+    assert "Checking segments" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", exit_code=0)
+    assert "Starting repository check" not in output
+    assert "Starting archive consistency check" in output
+
+    output = cmd(
+        archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--match-archives=archive2", exit_code=0
+    )
+    assert "archive1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--first=1", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--last=1", exit_code=0)
+    assert "archive1" not in output
+    assert "archive2" in output
+
+
+def test_date_matching(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    check_cmd_setup(archiver)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    earliest_ts = "2022-11-20T23:59:59"
+    ts_in_between = "2022-12-18T23:59:59"
+    create_src_archive(archiver, "archive1", ts=earliest_ts)
+    create_src_archive(archiver, "archive2", ts=ts_in_between)
+    create_src_archive(archiver, "archive3")
+    cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--oldest=23e", exit_code=2)
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--oldest=1m", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--newest=1m", exit_code=0)
+    assert "archive3" in output
+    assert "archive2" not in output
+    assert "archive1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--newer=1d", exit_code=0)
+    assert "archive3" in output
+    assert "archive1" not in output
+    assert "archive2" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--older=1d", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+    # check for output when a timespan older than the earliest archive is given. Issue #1711
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--archives-only", "--older=9999m", exit_code=0)
+    for archive in ("archive1", "archive2", "archive3"):
+        assert archive not in output
+
+
+def test_missing_file_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
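+                # remember this file's chunk list, then delete its last chunk from the repo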
+                valid_chunks = item.chunks
+                killed_chunk = valid_chunks[-1]
+                repository.delete(killed_chunk.id)
+                break
+        else:
+            pytest.fail("should not happen")
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    assert "New missing file chunk detected" in output
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0)
+    assert "broken#" in output
+
+    # check that the file in the old archives now has a different chunk list without the killed chunk
+    for archive_name in ("archive1", "archive2"):
+        archive, repository = open_archive(repo_path, archive_name)
         with repository:
             for item in archive.iter_items():
                 if item.path.endswith(src_file):
-                    valid_chunks = item.chunks
-                    killed_chunk = valid_chunks[-1]
-                    repository.delete(killed_chunk.id)
+                    assert valid_chunks != item.chunks
+                    assert killed_chunk not in item.chunks
                     break
             else:
-                self.fail("should not happen")
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.assert_in("New missing file chunk detected", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0
-        )
-        self.assert_in("broken#", output)
-        # check that the file in the old archives has now a different chunk list without the killed chunk
-        for archive_name in ("archive1", "archive2"):
-            archive, repository = self.open_archive(archive_name)
-            with repository:
-                for item in archive.iter_items():
-                    if item.path.endswith(src_file):
-                        self.assert_not_equal(valid_chunks, item.chunks)
-                        self.assert_not_in(killed_chunk, item.chunks)
-                        break
-                else:
-                    self.fail("should not happen")
-        # do a fresh backup (that will include the killed chunk)
-        with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
-            self.create_src_archive("archive3")
-        # check should be able to heal the file now:
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("Healed previously missing file chunk", output)
-        self.assert_in(f"{src_file}: Completely healed previously damaged file!", output)
-        # check that the file in the old archives has the correct chunks again
-        for archive_name in ("archive1", "archive2"):
-            archive, repository = self.open_archive(archive_name)
-            with repository:
-                for item in archive.iter_items():
-                    if item.path.endswith(src_file):
-                        self.assert_equal(valid_chunks, item.chunks)
-                        break
-                else:
-                    self.fail("should not happen")
-        # list is also all-healthy again
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0
-        )
-        self.assert_not_in("broken#", output)
+                pytest.fail("should not happen")
 
-    def test_missing_archive_item_chunk(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(archive.metadata.items[0])
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_missing_archive_metadata(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(archive.id)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_missing_manifest(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            repository.delete(Manifest.MANIFEST_ID)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_corrupted_manifest(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_manifest_rebuild_corrupted_chunk(self):
-        archive, repository = self.open_archive("archive1")
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-
-            chunk = repository.get(archive.id)
-            corrupted_chunk = chunk + b"corrupted!"
-            repository.put(archive.id, corrupted_chunk)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "-v", "--repair", exit_code=0)
-        self.assert_in("archive2", output)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-
-    def test_manifest_rebuild_duplicate_archive(self):
-        archive, repository = self.open_archive("archive1")
-        repo_objs = archive.repo_objs
+    # do a fresh backup (that will include the killed chunk)
+    with patch.object(ChunkBuffer, "BUFFER_SIZE", 10):
+        create_src_archive(archiver, "archive3")
 
-        with repository:
-            manifest = repository.get(Manifest.MANIFEST_ID)
-            corrupted_manifest = manifest + b"corrupted!"
-            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
-
-            archive = msgpack.packb(
-                {
-                    "command_line": "",
-                    "item_ptrs": [],
-                    "hostname": "foo",
-                    "username": "bar",
-                    "name": "archive1",
-                    "time": "2016-12-15T18:49:51.849711",
-                    "version": 2,
-                }
-            )
-            archive_id = repo_objs.id_hash(archive)
-            repository.put(archive_id, repo_objs.format(archive_id, {}, archive))
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_in("archive1", output)
-        self.assert_in("archive1.1", output)
-        self.assert_in("archive2", output)
-
-    def test_extra_chunks(self):
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        with Repository(self.repository_location, exclusive=True) as repository:
-            repository.put(b"01234567890123456789012345678901", b"xxxx")
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        self.cmd(f"--repo={self.repository_location}", "extract", "archive1", "--dry-run", exit_code=0)
-
-    def _test_verify_data(self, *init_args):
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", *init_args)
-        self.create_src_archive("archive1")
-        archive, repository = self.open_archive("archive1")
+    # check should be able to heal the file now:
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "Healed previously missing file chunk" in output
+    assert f"{src_file}: Completely healed previously damaged file!" in output
+
+    # check that the file in the old archives has the correct chunks again
+    for archive_name in ("archive1", "archive2"):
+        archive, repository = open_archive(repo_path, archive_name)
         with repository:
             for item in archive.iter_items():
                 if item.path.endswith(src_file):
-                    chunk = item.chunks[-1]
-                    data = repository.get(chunk.id)
-                    data = data[0:100] + b"x" + data[101:]
-                    repository.put(chunk.id, data)
+                    assert valid_chunks == item.chunks
                     break
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=0)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--verify-data", exit_code=1)
-        assert bin_to_hex(chunk.id) + ", integrity error" in output
-        # repair (heal is tested in another test)
-        output = self.cmd(f"--repo={self.repository_location}", "check", "--repair", "--verify-data", exit_code=0)
-        assert bin_to_hex(chunk.id) + ", integrity error" in output
-        assert f"{src_file}: New missing file chunk detected" in output
+            else:
+                pytest.fail("should not happen")
+
+    # list is also all-healthy again
+    output = cmd(archiver, f"--repo={repo_location}", "list", "archive1", "--format={health}#{path}{NL}", exit_code=0)
+    assert "broken#" not in output
+
+
+def test_missing_archive_item_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        repository.delete(archive.metadata.items[0])
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_missing_archive_metadata(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        repository.delete(archive.id)
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_missing_manifest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        repository.delete(Manifest.MANIFEST_ID)
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_corrupted_manifest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_manifest_rebuild_corrupted_chunk(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
+        chunk = repository.get(archive.id)
+        corrupted_chunk = chunk + b"corrupted!"
+        repository.put(archive.id, corrupted_chunk)
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "-v", "--repair", exit_code=0)
+    assert "archive2" in output
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+
+def test_manifest_rebuild_duplicate_archive(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    archive, repository = open_archive(repo_path, "archive1")
+    repo_objs = archive.repo_objs
+
+    with repository:
+        manifest = repository.get(Manifest.MANIFEST_ID)
+        corrupted_manifest = manifest + b"corrupted!"
+        repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
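+        # inject a second archive object named "archive1"; repair renames the duplicate to "archive1.1"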
+        archive = msgpack.packb(
+            {
+                "command_line": "",
+                "item_ptrs": [],
+                "hostname": "foo",
+                "username": "bar",
+                "name": "archive1",
+                "time": "2016-12-15T18:49:51.849711",
+                "version": 2,
+            }
+        )
+        archive_id = repo_objs.id_hash(archive)
+        repository.put(archive_id, repo_objs.format(archive_id, {}, archive))
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "archive1" in output
+    assert "archive1.1" in output
+    assert "archive2" in output
+
+
+def test_extra_chunks(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.get_kind() == "remote":
+        pytest.skip("only works locally")
+    repo_location = archiver.repository_location
+    check_cmd_setup(archiver)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+
+    with Repository(repo_location, exclusive=True) as repository:
+        repository.put(b"01234567890123456789012345678901", b"xxxx")
+        repository.commit(compact=False)
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    cmd(archiver, f"--repo={repo_location}", "extract", "archive1", "--dry-run", exit_code=0)
 
-    def test_verify_data(self):
-        self._test_verify_data(RK_ENCRYPTION)
 
-    def test_verify_data_unencrypted(self):
-        self._test_verify_data("--encryption", "none")
+@pytest.mark.parametrize("init_args", [["--encryption=repokey-aes-ocb"], ["--encryption", "none"]])
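+# exercised once with an encrypted (repokey-aes-ocb) repo and once with encryption disabled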
+def test_verify_data(archivers, request, init_args):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    check_cmd_setup(archiver)
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", *init_args)
+    create_src_archive(archiver, "archive1")
+    archive, repository = open_archive(repo_path, "archive1")
 
-    def test_empty_repository(self):
-        with Repository(self.repository_location, exclusive=True) as repository:
-            for id_ in repository.list():
-                repository.delete(id_)
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
+    with repository:
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
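+                # overwrite one byte of the stored chunk to corrupt it on disk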
+                chunk = item.chunks[-1]
+                data = repository.get(chunk.id)
+                data = data[0:100] + b"x" + data[101:]
+                repository.put(chunk.id, data)
+                break
+        repository.commit(compact=False)
 
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=0)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--verify-data", exit_code=1)
+    assert bin_to_hex(chunk.id) + ", integrity error" in output
 
-class RemoteArchiverCheckTestCase(RemoteArchiverTestCaseBase, ArchiverCheckTestCase):
-    """run the same tests, but with a remote repository"""
+    # repair (heal is tested in another test)
+    output = cmd(archiver, f"--repo={repo_location}", "check", "--repair", "--verify-data", exit_code=0)
+    assert bin_to_hex(chunk.id) + ", integrity error" in output
+    assert f"{src_file}: New missing file chunk detected" in output
 
-    @unittest.skip("only works locally")
-    def test_empty_repository(self):
-        pass
 
-    @unittest.skip("only works locally")
-    def test_extra_chunks(self):
-        pass
+def test_empty_repository(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.get_kind() == "remote":
+        pytest.skip("only works locally")
+    repo_location = archiver.repository_location
+    check_cmd_setup(archiver)
 
+    with Repository(repo_location, exclusive=True) as repository:
+        for id_ in repository.list():
+            repository.delete(id_)
+        repository.commit(compact=False)
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverCheckTestCase):
-    """runs the same tests, but via the borg binary"""
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)

+ 429 - 376
src/borg/testsuite/archiver/checks.py

@@ -1,6 +1,5 @@
 import os
 import shutil
-import unittest
 from datetime import datetime, timezone, timedelta
 from unittest.mock import patch
 
@@ -9,391 +8,445 @@ import pytest
 from ...cache import Cache, LocalCache
 from ...constants import *  # NOQA
 from ...crypto.key import TAMRequiredError
-from ...helpers import Location, get_security_dir
+from ...helpers import Location, get_security_dir, bin_to_hex
 from ...helpers import EXIT_ERROR
-from ...helpers import bin_to_hex
 from ...helpers import msgpack
 from ...manifest import Manifest, MandatoryFeatureUnsupported
 from ...remote import RemoteRepository, PathNotAllowed
 from ...repository import Repository
 from .. import llfuse
 from .. import changedir, environment_variable
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def get_security_dir(self):
-        repository_id = bin_to_hex(self._extract_repository_id(self.repository_path))
-        return get_security_dir(repository_id)
-
-    def test_repository_swap_detection(self):
-        self.create_test_files()
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self._set_repository_id(self.repository_path, repository_id)
-        self.assert_equal(repository_id, self._extract_repository_id(self.repository_path))
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.EncryptionMethodMismatch):
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_swap_detection2(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rcreate", "--encryption=none")
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test", "input")
-        shutil.rmtree(self.repository_path + "_encrypted")
-        os.replace(self.repository_path + "_unencrypted", self.repository_path + "_encrypted")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.RepositoryAccessAborted):
-                self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input")
-
-    def test_repository_swap_detection_no_cache(self):
-        self.create_test_files()
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        shutil.rmtree(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self._set_repository_id(self.repository_path, repository_id)
-        self.assert_equal(repository_id, self._extract_repository_id(self.repository_path))
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.EncryptionMethodMismatch):
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_swap_detection2_no_cache(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rcreate", "--encryption=none")
-        os.environ["BORG_PASSPHRASE"] = "passphrase"
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}_unencrypted", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}_encrypted", "rdelete", "--cache-only")
-        shutil.rmtree(self.repository_path + "_encrypted")
-        os.replace(self.repository_path + "_unencrypted", self.repository_path + "_encrypted")
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(Cache.RepositoryAccessAborted):
-                self.cmd(f"--repo={self.repository_location}_encrypted", "create", "test.2", "input")
-
-    def test_repository_swap_detection_repokey_blank_passphrase(self):
-        # Check that a repokey repo with a blank passphrase is considered like a plaintext repo.
-        self.create_test_files()
-        # User initializes her repository with her passphrase
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        # Attacker replaces it with her own repository, which is encrypted but has no passphrase set
-        shutil.rmtree(self.repository_path)
-        with environment_variable(BORG_PASSPHRASE=""):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-            # Delete cache & security database, AKA switch to user perspective
-            self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-            shutil.rmtree(self.get_security_dir())
-        with environment_variable(BORG_PASSPHRASE=None):
-            # This is the part were the user would be tricked, e.g. she assumes that BORG_PASSPHRASE
-            # is set, while it isn't. Previously this raised no warning,
-            # since the repository is, technically, encrypted.
-            if self.FORK_DEFAULT:
-                self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
-            else:
-                with pytest.raises(Cache.CacheInitAbortedError):
-                    self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-
-    def test_repository_move(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        security_dir = self.get_security_dir()
-        os.replace(self.repository_path, self.repository_path + "_new")
-        with environment_variable(BORG_RELOCATED_REPO_ACCESS_IS_OK="yes"):
-            self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        with open(os.path.join(security_dir, "location")) as fd:
-            location = fd.read()
-            assert location == Location(self.repository_location + "_new").canonical_path()
-        # Needs no confirmation anymore
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        shutil.rmtree(self.cache_path)
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        shutil.rmtree(security_dir)
-        self.cmd(f"--repo={self.repository_location}_new", "rinfo")
-        for file in ("location", "key-type", "manifest-timestamp"):
-            assert os.path.exists(os.path.join(security_dir, file))
-
-    def test_security_dir_compat(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        with open(os.path.join(self.get_security_dir(), "location"), "w") as fd:
-            fd.write("something outdated")
-        # This is fine, because the cache still has the correct information. security_dir and cache can disagree
-        # if older versions are used to confirm a renamed repository.
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def test_unknown_unencrypted(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        # Ok: repository is known
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-        # Ok: repository is still known (through security_dir)
-        shutil.rmtree(self.cache_path)
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-        # Needs confirmation: cache and security dir both gone (eg. another host or rm -rf ~)
-        shutil.rmtree(self.cache_path)
-        shutil.rmtree(self.get_security_dir())
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=EXIT_ERROR)
+from . import cmd, _extract_repository_id, open_repository, check_cache, create_test_files, create_src_archive
+from . import _set_repository_id, create_regular_file, assert_creates_file, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote")  # NOQA
+
+
+def get_security_directory(repo_path):
+    repository_id = bin_to_hex(_extract_repository_id(repo_path))
+    return get_security_dir(repository_id)
+
+
+def add_unknown_feature(repo_path, operation):
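+    # flag the given operation as requiring a feature unknown to this borg version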
+    with Repository(repo_path, exclusive=True) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        manifest.config["feature_flags"] = {operation.value: {"mandatory": ["unknown-feature"]}}
+        manifest.write()
+        repository.commit(compact=False)
+
+
+def cmd_raises_unknown_feature(archiver, args):
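+    # a forked borg only surfaces the error exit code; in-process we can assert on the exception itself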
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, *args, exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
+            cmd(archiver, *args)
+        assert excinfo.value.args == (["unknown-feature"],)
+
+
+def test_repository_swap_detection(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    _set_repository_id(repo_path, repository_id)
+    assert repository_id == _extract_repository_id(repo_path)
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.EncryptionMethodMismatch):
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_swap_detection2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rcreate", "--encryption=none")
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test", "input")
+    shutil.rmtree(repo_path + "_encrypted")
+    os.replace(repo_path + "_unencrypted", repo_path + "_encrypted")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.RepositoryAccessAborted):
+            cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input")
+
+
+def test_repository_swap_detection_no_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    shutil.rmtree(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    _set_repository_id(repo_path, repository_id)
+    assert repository_id == _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.EncryptionMethodMismatch):
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_swap_detection2_no_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rcreate", "--encryption=none")
+    os.environ["BORG_PASSPHRASE"] = "passphrase"
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}_unencrypted", "rdelete", "--cache-only")
+    cmd(archiver, f"--repo={repo_location}_encrypted", "rdelete", "--cache-only")
+    shutil.rmtree(repo_path + "_encrypted")
+    os.replace(repo_path + "_unencrypted", repo_path + "_encrypted")
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.RepositoryAccessAborted):
+            cmd(archiver, f"--repo={repo_location}_encrypted", "create", "test.2", "input")
+
+
+def test_repository_swap_detection_repokey_blank_passphrase(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    # Check that a repokey repo with a blank passphrase is considered like a plaintext repo.
+    create_test_files(input_path)
+    # User initializes her repository with her passphrase
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    # Attacker replaces it with her own repository, which is encrypted but has no passphrase set
+    shutil.rmtree(repo_path)
+    with environment_variable(BORG_PASSPHRASE=""):
+        cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+        # Delete cache & security database, AKA switch to user perspective
+        cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+        shutil.rmtree(get_security_directory(repo_path))
+    with environment_variable(BORG_PASSPHRASE=None):
+        # This is the part where the user would be tricked, e.g. she assumes that BORG_PASSPHRASE
+        # is set, while it isn't. Previously this raised no warning,
+        # since the repository is, technically, encrypted.
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input", exit_code=EXIT_ERROR)
         else:
             with pytest.raises(Cache.CacheInitAbortedError):
-                self.cmd(f"--repo={self.repository_location}", "rinfo")
-        with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK="yes"):
-            self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def add_unknown_feature(self, operation):
-        with Repository(self.repository_path, exclusive=True) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            manifest.config["feature_flags"] = {operation.value: {"mandatory": ["unknown-feature"]}}
-            manifest.write()
-            repository.commit(compact=False)
-
-    def cmd_raises_unknown_feature(self, args):
-        if self.FORK_DEFAULT:
-            self.cmd(*args, exit_code=EXIT_ERROR)
-        else:
-            with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
-                self.cmd(*args)
-            assert excinfo.value.args == (["unknown-feature"],)
-
-    def test_unknown_feature_on_create(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.add_unknown_feature(Manifest.Operation.WRITE)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "create", "test", "input"])
-
-    def test_unknown_feature_on_cache_sync(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "create", "test", "input"])
-
-    def test_unknown_feature_on_change_passphrase(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.add_unknown_feature(Manifest.Operation.CHECK)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "key", "change-passphrase"])
-
-    def test_unknown_feature_on_read(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        with changedir("output"):
-            self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "extract", "test"])
-
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "rlist"])
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "info", "-a", "test"])
-
-    def test_unknown_feature_on_rename(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.CHECK)
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "rename", "test", "other"])
-
-    def test_unknown_feature_on_delete(self):
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.DELETE)
-        # delete of an archive raises
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "delete", "-a", "test"])
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}", "prune", "--keep-daily=3"])
-        # delete of the whole repository ignores features
-        self.cmd(f"--repo={self.repository_location}", "rdelete")
-
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_unknown_feature_on_mount(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.add_unknown_feature(Manifest.Operation.READ)
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        os.mkdir(mountpoint)
-        # XXX this might hang if it doesn't raise an error
-        self.cmd_raises_unknown_feature([f"--repo={self.repository_location}::test", "mount", mountpoint])
-
-    @pytest.mark.allow_cache_wipe
-    def test_unknown_mandatory_feature_in_cache(self):
-        remote_repo = bool(self.prefix)
-        print(self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION))
-
-        with Repository(self.repository_path, exclusive=True) as repository:
-            if remote_repo:
-                repository._location = Location(self.repository_location)
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest) as cache:
-                cache.begin_txn()
-                cache.cache_config.mandatory_features = {"unknown-feature"}
-                cache.commit()
-
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        else:
-            called = False
-            wipe_cache_safe = LocalCache.wipe_cache
-
-            def wipe_wrapper(*args):
-                nonlocal called
-                called = True
-                wipe_cache_safe(*args)
-
-            with patch.object(LocalCache, "wipe_cache", wipe_wrapper):
-                self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-            assert called
-
-        with Repository(self.repository_path, exclusive=True) as repository:
-            if remote_repo:
-                repository._location = Location(self.repository_location)
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest) as cache:
-                assert cache.cache_config.mandatory_features == set()
-
-    def test_check_cache(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with self.open_repository() as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            with Cache(repository, manifest, sync=False) as cache:
-                cache.begin_txn()
-                cache.chunks.incref(list(cache.chunks.iteritems())[0][0])
-                cache.commit()
-        with pytest.raises(AssertionError):
-            self.check_cache()
-
-
-class ManifestAuthenticationTest(ArchiverTestCaseBase):
-    def spoof_manifest(self, repository):
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            cdata = manifest.repo_objs.format(
-                Manifest.MANIFEST_ID,
-                {},
-                msgpack.packb(
-                    {
-                        "version": 1,
-                        "archives": {},
-                        "config": {},
-                        "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
-                            timespec="microseconds"
-                        ),
-                    }
-                ),
+                cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+
+
+def test_repository_move(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    security_dir = get_security_directory(repo_path)
+    os.replace(repo_path, repo_path + "_new")
+    with environment_variable(BORG_RELOCATED_REPO_ACCESS_IS_OK="yes"):
+        cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    with open(os.path.join(security_dir, "location")) as fd:
+        location = fd.read()
+        assert location == Location(repo_location + "_new").canonical_path()
+    # Needs no confirmation anymore
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    shutil.rmtree(archiver.cache_path)
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    shutil.rmtree(security_dir)
+    cmd(archiver, f"--repo={repo_location}_new", "rinfo")
+    for file in ("location", "key-type", "manifest-timestamp"):
+        assert os.path.exists(os.path.join(security_dir, file))
+
+
+def test_security_dir_compat(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    with open(os.path.join(get_security_directory(repo_path), "location"), "w") as fd:
+        fd.write("something outdated")
+    # This is fine, because the cache still has the correct information. security_dir and cache can disagree
+    # if older versions are used to confirm a renamed repository.
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_unknown_unencrypted(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, cache_path = archiver.repository_location, archiver.repository_path, archiver.cache_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    # Ok: repository is known
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+    # Ok: repository is still known (through security_dir)
+    shutil.rmtree(cache_path)
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+    # Needs confirmation: cache and security dir both gone (e.g. another host or rm -rf ~)
+    shutil.rmtree(get_security_directory(repo_path))
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "rinfo", exit_code=EXIT_ERROR)
+    else:
+        with pytest.raises(Cache.CacheInitAbortedError):
+            cmd(archiver, f"--repo={repo_location}", "rinfo")
+    with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK="yes"):
+        cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_unknown_feature_on_create(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    add_unknown_feature(repo_path, Manifest.Operation.WRITE)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "create", "test", "input"])
+
+
+def test_unknown_feature_on_cache_sync(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "create", "test", "input"])
+
+
+def test_unknown_feature_on_change_passphrase(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    add_unknown_feature(repo_path, Manifest.Operation.CHECK)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "key", "change-passphrase"])
+
+
+def test_unknown_feature_on_read(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    with changedir("output"):
+        cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "extract", "test"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "rlist"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "info", "-a", "test"])
+
+
+def test_unknown_feature_on_rename(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.CHECK)
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "rename", "test", "other"])
+
+
+def test_unknown_feature_on_delete(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.DELETE)
+    # delete of an archive raises
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "delete", "-a", "test"])
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}", "prune", "--keep-daily=3"])
+    # delete of the whole repository ignores features
+    cmd(archiver, f"--repo={repo_location}", "rdelete")
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_unknown_feature_on_mount(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    add_unknown_feature(repo_path, Manifest.Operation.READ)
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    os.mkdir(mountpoint)
+    # XXX this might hang if it doesn't raise an error
+    cmd_raises_unknown_feature(archiver, [f"--repo={repo_location}::test", "mount", mountpoint])
+
+
+@pytest.mark.allow_cache_wipe
+def test_unknown_mandatory_feature_in_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    remote_repo = archiver.get_kind() == "remote"
+    print(cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION))
+
+    with Repository(repo_path, exclusive=True) as repository:
+        if remote_repo:
+            repository._location = Location(repo_location)
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest) as cache:
+            cache.begin_txn()
+            cache.cache_config.mandatory_features = {"unknown-feature"}
+            cache.commit()
+
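+    # when forking (remote/binary runs), the child process cannot be patched, so just run
+    # "create"; otherwise wrap LocalCache.wipe_cache to prove that hitting the unknown
+    # mandatory feature wipes and resyncs the cache: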
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    else:
+        called = False
+        wipe_cache_safe = LocalCache.wipe_cache
+
+        def wipe_wrapper(*args):
+            nonlocal called
+            called = True
+            wipe_cache_safe(*args)
+
+        with patch.object(LocalCache, "wipe_cache", wipe_wrapper):
+            cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+        assert called
+
+    with Repository(repo_path, exclusive=True) as repository:
+        if remote_repo:
+            repository._location = Location(repo_location)
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest) as cache:
+            assert cache.cache_config.mandatory_features == set()
+
+
+def test_check_cache(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with open_repository(archiver) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        with Cache(repository, manifest, sync=False) as cache:
+            cache.begin_txn()
+            cache.chunks.incref(list(cache.chunks.iteritems())[0][0])
+            cache.commit()
+    with pytest.raises(AssertionError):
+        check_cache(archiver)
+
+
+# Begin manifest tests
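+# spoof_manifest writes back a manifest that lacks the TAM (tertiary authentication for
+# metadata) signature and is timestamped one day in the future, mimicking a manifest
+# written by borg < 1.0.9: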
+def spoof_manifest(repository):
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        cdata = manifest.repo_objs.format(
+            Manifest.MANIFEST_ID,
+            {},
+            msgpack.packb(
+                {
+                    "version": 1,
+                    "archives": {},
+                    "config": {},
+                    "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(timespec="microseconds"),
+                }
+            ),
+        )
+        repository.put(Manifest.MANIFEST_ID, cdata)
+        repository.commit(compact=False)
+
+
+def test_fresh_init_tam_required(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repository = Repository(repo_path, exclusive=True)
+    with repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        cdata = manifest.repo_objs.format(
+            Manifest.MANIFEST_ID,
+            {},
+            msgpack.packb(
+                {
+                    "version": 1,
+                    "archives": {},
+                    "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(timespec="microseconds"),
+                }
+            ),
+        )
+        repository.put(Manifest.MANIFEST_ID, cdata)
+        repository.commit(compact=False)
+
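+    # the manifest just written carries no TAM signature, so reading the repo must be refused: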
+    with pytest.raises(TAMRequiredError):
+        cmd(archiver, f"--repo={repo_location}", "rlist")
+
+
+def test_not_required(archiver):
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "archive1234")
+    repository = Repository(repo_path, exclusive=True)
+    # Manifest must be authenticated now
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--debug")
+    assert "archive1234" in output
+    assert "TAM-verified manifest" in output
+    # Try to spoof / modify pre-1.0.9
+    spoof_manifest(repository)
+    # Fails
+    with pytest.raises(TAMRequiredError):
+        cmd(archiver, f"--repo={repo_location}", "rlist")
+
+
+# Begin Remote Tests
+def test_remote_repo_restrict_to_path(remote_archiver):
+    repo_location, repo_path = remote_archiver.repository_location, remote_archiver.repository_path
+    # restricted to repo directory itself:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", repo_path]):
+        cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # restricted to repo directory itself, fail for other directories with same prefix:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", repo_path]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}_0", "rcreate", RK_ENCRYPTION)
+    # restricted to a completely different path:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo"]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}_1", "rcreate", RK_ENCRYPTION)
+    path_prefix = os.path.dirname(repo_path)
+    # restrict to repo directory's parent directory:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", path_prefix]):
+        cmd(remote_archiver, f"--repo={repo_location}_2", "rcreate", RK_ENCRYPTION)
+    # restrict to repo directory's parent directory and another directory:
+    with patch.object(
+        RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo", "--restrict-to-path", path_prefix]
+    ):
+        cmd(remote_archiver, f"--repo={repo_location}_3", "rcreate", RK_ENCRYPTION)
+
+
+def test_remote_repo_restrict_to_repository(remote_archiver):
+    repo_location, repo_path = remote_archiver.repository_location, remote_archiver.repository_path
+    # restricted to repo directory itself:
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", repo_path]):
+        cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    parent_path = os.path.join(repo_path, "..")
+    with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", parent_path]):
+        with pytest.raises(PathNotAllowed):
+            cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+
+
+def test_remote_repo_strip_components_doesnt_leak(remote_archiver):
+    repo_location, input_path = remote_archiver.repository_location, remote_archiver.input_path
+    cmd(remote_archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "dir/file", contents=b"test file contents 1")
+    create_regular_file(input_path, "dir/file2", contents=b"test file contents 2")
+    create_regular_file(input_path, "skipped-file1", contents=b"test file contents 3")
+    create_regular_file(input_path, "skipped-file2", contents=b"test file contents 4")
+    create_regular_file(input_path, "skipped-file3", contents=b"test file contents 5")
+    cmd(remote_archiver, f"--repo={repo_location}", "create", "test", "input")
+    marker = "cached responses left in RemoteRepository"
+    with changedir("output"):
+        res = cmd(remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "3")
+        assert marker not in res
+        with assert_creates_file("file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "2"
             )
-            repository.put(Manifest.MANIFEST_ID, cdata)
-            repository.commit(compact=False)
-
-    def test_fresh_init_tam_required(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repository = Repository(self.repository_path, exclusive=True)
-        with repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            cdata = manifest.repo_objs.format(
-                Manifest.MANIFEST_ID,
-                {},
-                msgpack.packb(
-                    {
-                        "version": 1,
-                        "archives": {},
-                        "timestamp": (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat(
-                            timespec="microseconds"
-                        ),
-                    }
-                ),
+            assert marker not in res
+        with assert_creates_file("dir/file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "1"
             )
-            repository.put(Manifest.MANIFEST_ID, cdata)
-            repository.commit(compact=False)
-
-        with pytest.raises(TAMRequiredError):
-            self.cmd(f"--repo={self.repository_location}", "rlist")
-
-    def test_not_required(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("archive1234")
-        repository = Repository(self.repository_path, exclusive=True)
-        # Manifest must be authenticated now
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--debug")
-        assert "archive1234" in output
-        assert "TAM-verified manifest" in output
-        # Try to spoof / modify pre-1.0.9
-        self.spoof_manifest(repository)
-        # Fails
-        with pytest.raises(TAMRequiredError):
-            self.cmd(f"--repo={self.repository_location}", "rlist")
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    def test_remote_repo_restrict_to_path(self):
-        # restricted to repo directory itself:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", self.repository_path]):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # restricted to repo directory itself, fail for other directories with same prefix:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", self.repository_path]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}_0", "rcreate", RK_ENCRYPTION)
-
-        # restricted to a completely different path:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo"]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}_1", "rcreate", RK_ENCRYPTION)
-        path_prefix = os.path.dirname(self.repository_path)
-        # restrict to repo directory's parent directory:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-path", path_prefix]):
-            self.cmd(f"--repo={self.repository_location}_2", "rcreate", RK_ENCRYPTION)
-        # restrict to repo directory's parent directory and another directory:
-        with patch.object(
-            RemoteRepository, "extra_test_args", ["--restrict-to-path", "/foo", "--restrict-to-path", path_prefix]
-        ):
-            self.cmd(f"--repo={self.repository_location}_3", "rcreate", RK_ENCRYPTION)
-
-    def test_remote_repo_restrict_to_repository(self):
-        # restricted to repo directory itself:
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", self.repository_path]):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        parent_path = os.path.join(self.repository_path, "..")
-        with patch.object(RemoteRepository, "extra_test_args", ["--restrict-to-repository", parent_path]):
-            with pytest.raises(PathNotAllowed):
-                self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-    def test_remote_repo_strip_components_doesnt_leak(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("dir/file", contents=b"test file contents 1")
-        self.create_regular_file("dir/file2", contents=b"test file contents 2")
-        self.create_regular_file("skipped-file1", contents=b"test file contents 3")
-        self.create_regular_file("skipped-file2", contents=b"test file contents 4")
-        self.create_regular_file("skipped-file3", contents=b"test file contents 5")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        marker = "cached responses left in RemoteRepository"
-        with changedir("output"):
-            res = self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "3"
+            assert marker not in res
+        with assert_creates_file("input/dir/file"):
+            res = cmd(
+                remote_archiver, f"--repo={repo_location}", "extract", "test", "--debug", "--strip-components", "0"
             )
             assert marker not in res
-            with self.assert_creates_file("file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "2"
-                )
-                assert marker not in res
-            with self.assert_creates_file("dir/file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "1"
-                )
-                assert marker not in res
-            with self.assert_creates_file("input/dir/file"):
-                res = self.cmd(
-                    f"--repo={self.repository_location}", "extract", "test", "--debug", "--strip-components", "0"
-                )
-                assert marker not in res

+ 43 - 45
src/borg/testsuite/archiver/config_cmd.py

@@ -1,48 +1,46 @@
 import os
-import unittest
 
 from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_config(self):
-        self.create_test_files()
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "config", "--list")
-        self.assert_in("[repository]", output)
-        self.assert_in("version", output)
-        self.assert_in("segments_per_dir", output)
-        self.assert_in("storage_quota", output)
-        self.assert_in("append_only", output)
-        self.assert_in("additional_free_space", output)
-        self.assert_in("id", output)
-        self.assert_not_in("last_segment_checked", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked", exit_code=1)
-        self.assert_in("No option ", output)
-        self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked", "123")
-        output = self.cmd(f"--repo={self.repository_location}", "config", "last_segment_checked")
-        assert output == "123" + os.linesep
-        output = self.cmd(f"--repo={self.repository_location}", "config", "--list")
-        self.assert_in("last_segment_checked", output)
-        self.cmd(f"--repo={self.repository_location}", "config", "--delete", "last_segment_checked")
-
-        for cfg_key, cfg_value in [("additional_free_space", "2G"), ("repository.append_only", "1")]:
-            output = self.cmd(f"--repo={self.repository_location}", "config", cfg_key)
-            assert output == "0" + os.linesep
-            self.cmd(f"--repo={self.repository_location}", "config", cfg_key, cfg_value)
-            output = self.cmd(f"--repo={self.repository_location}", "config", cfg_key)
-            assert output == cfg_value + os.linesep
-            self.cmd(f"--repo={self.repository_location}", "config", "--delete", cfg_key)
-            self.cmd(f"--repo={self.repository_location}", "config", cfg_key, exit_code=1)
-
-        self.cmd(f"--repo={self.repository_location}", "config", "--list", "--delete", exit_code=2)
-        self.cmd(f"--repo={self.repository_location}", "config", exit_code=2)
-        self.cmd(f"--repo={self.repository_location}", "config", "invalid-option", exit_code=1)
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import RK_ENCRYPTION, create_test_files, cmd, generate_archiver_tests
+
+# Tests that include the 'archivers' argument will generate tests for each kind of archiver specified.
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,binary")  # NOQA
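+# A sketch of the resulting pattern (fixture names are whatever generate_archiver_tests
+# registers for each kind, e.g. a local and a binary archiver fixture):
+#     def test_something(archivers, request):
+#         archiver = request.getfixturevalue(archivers)
+#         cmd(archiver, f"--repo={archiver.repository_location}", "rinfo")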
+
+
+def test_config(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    create_test_files(archiver.input_path)
+    os.unlink("input/flagfile")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "config", "--list")
+    assert "[repository]" in output
+    assert "version" in output
+    assert "segments_per_dir" in output
+    assert "storage_quota" in output
+    assert "append_only" in output
+    assert "additional_free_space" in output
+    assert "id" in output
+    assert "last_segment_checked" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked", exit_code=1)
+    assert "No option " in output
+    cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked", "123")
+    output = cmd(archiver, f"--repo={repo_location}", "config", "last_segment_checked")
+    assert output == "123" + os.linesep
+    output = cmd(archiver, f"--repo={repo_location}", "config", "--list")
+    assert "last_segment_checked" in output
+    cmd(archiver, f"--repo={repo_location}", "config", "--delete", "last_segment_checked")
+
+    for cfg_key, cfg_value in [("additional_free_space", "2G"), ("repository.append_only", "1")]:
+        output = cmd(archiver, f"--repo={repo_location}", "config", cfg_key)
+        assert output == "0" + os.linesep
+        cmd(archiver, f"--repo={repo_location}", "config", cfg_key, cfg_value)
+        output = cmd(archiver, f"--repo={repo_location}", "config", cfg_key)
+        assert output == cfg_value + os.linesep
+        cmd(archiver, f"--repo={repo_location}", "config", "--delete", cfg_key)
+        cmd(archiver, f"--repo={repo_location}", "config", cfg_key, exit_code=1)
+
+    cmd(archiver, f"--repo={repo_location}", "config", "--list", "--delete", exit_code=2)
+    cmd(archiver, f"--repo={repo_location}", "config", exit_code=2)
+    cmd(archiver, f"--repo={repo_location}", "config", "invalid-option", exit_code=1)

+ 101 - 90
src/borg/testsuite/archiver/corruption.py

@@ -8,93 +8,104 @@ import pytest
 from ...constants import *  # NOQA
 from ...crypto.file_integrity import FileIntegrityError
 from ...helpers import bin_to_hex
-from . import ArchiverTestCaseBase, RK_ENCRYPTION
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_check_corrupted_repository(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("test")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "check")
-
-        name = sorted(os.listdir(os.path.join(self.tmpdir, "repository", "data", "0")), reverse=True)[1]
-        with open(os.path.join(self.tmpdir, "repository", "data", "0", name), "r+b") as fd:
-            fd.seek(100)
-            fd.write(b"XXXX")
-
-        self.cmd(f"--repo={self.repository_location}", "check", exit_code=1)
-
-
-class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
-    def setUp(self):
-        super().setUp()
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cache_path = json.loads(self.cmd(f"--repo={self.repository_location}", "rinfo", "--json"))["cache"]["path"]
-
-    def corrupt(self, file, amount=1):
-        with open(file, "r+b") as fd:
-            fd.seek(-amount, io.SEEK_END)
-            corrupted = bytes(255 - c for c in fd.read(amount))
-            fd.seek(-amount, io.SEEK_END)
-            fd.write(corrupted)
-
-    def test_cache_chunks(self):
-        self.corrupt(os.path.join(self.cache_path, "chunks"))
-
-        if self.FORK_DEFAULT:
-            out = self.cmd(f"--repo={self.repository_location}", "rinfo", exit_code=2)
-            assert "failed integrity check" in out
-        else:
-            with pytest.raises(FileIntegrityError):
-                self.cmd(f"--repo={self.repository_location}", "rinfo")
-
-    def test_cache_files(self):
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.corrupt(os.path.join(self.cache_path, "files"))
-        out = self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        # borg warns about the corrupt files cache, but then continues without files cache.
-        assert "files cache is corrupted" in out
-
-    def test_chunks_archive(self):
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        # Find ID of test1 so we can corrupt it later :)
-        target_id = self.cmd(f"--repo={self.repository_location}", "rlist", "--format={id}{NL}").strip()
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input")
-
-        # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}", "rinfo", "--json")
-
-        chunks_archive = os.path.join(self.cache_path, "chunks.archive.d")
-        assert len(os.listdir(chunks_archive)) == 4  # two archives, one chunks cache and one .integrity file each
-
-        self.corrupt(os.path.join(chunks_archive, target_id + ".compact"))
-
-        # Trigger cache sync by changing the manifest ID in the cache config
-        config_path = os.path.join(self.cache_path, "config")
-        config = ConfigParser(interpolation=None)
-        config.read(config_path)
-        config.set("cache", "manifest", bin_to_hex(bytes(32)))
-        with open(config_path, "w") as fd:
-            config.write(fd)
-
-        # Cache sync notices corrupted archive chunks, but automatically recovers.
-        out = self.cmd(f"--repo={self.repository_location}", "create", "-v", "test3", "input", exit_code=1)
-        assert "Reading cached archive chunk index for test1" in out
-        assert "Cached archive chunk index of test1 is corrupted" in out
-        assert "Fetching and building archive index for test1" in out
-
-    def test_old_version_interfered(self):
-        # Modify the main manifest ID without touching the manifest ID in the integrity section.
-        # This happens if a version without integrity checking modifies the cache.
-        config_path = os.path.join(self.cache_path, "config")
-        config = ConfigParser(interpolation=None)
-        config.read(config_path)
-        config.set("cache", "manifest", bin_to_hex(bytes(32)))
-        with open(config_path, "w") as fd:
-            config.write(fd)
-
-        out = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "Cache integrity data not available: old Borg version modified the cache." in out
+from . import cmd, create_src_archive, create_test_files, RK_ENCRYPTION
+
+
+def test_check_corrupted_repository(archiver):
+    repo_location, tmpdir = archiver.repository_location, archiver.tmpdir
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "test")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "check")
+
+    name = sorted(os.listdir(os.path.join(tmpdir, "repository", "data", "0")), reverse=True)[1]
+    with open(os.path.join(tmpdir, "repository", "data", "0", name), "r+b") as fd:
+        fd.seek(100)
+        fd.write(b"XXXX")
+
+    cmd(archiver, f"--repo={repo_location}", "check", exit_code=1)
+
+
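+# shared setup for the corruption tests below: create the input files, initialize a repo and
+# record the cache path reported by "rinfo --json" on the archiver object: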
+def corrupt_archiver(archiver):
+    create_test_files(archiver.input_path)
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", RK_ENCRYPTION)
+    archiver.cache_path = json.loads(cmd(archiver, f"--repo={archiver.repository_location}", "rinfo", "--json"))[
+        "cache"
+    ]["path"]
+
+
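+# flip every bit of the file's last `amount` bytes (255 - c inverts a byte), keeping size and
+# location intact while breaking the data's integrity check: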
+def corrupt(file, amount=1):
+    with open(file, "r+b") as fd:
+        fd.seek(-amount, io.SEEK_END)
+        corrupted = bytes(255 - c for c in fd.read(amount))
+        fd.seek(-amount, io.SEEK_END)
+        fd.write(corrupted)
+
+
+def test_cache_chunks(archiver):
+    corrupt_archiver(archiver)
+    repo_location, cache_path = archiver.repository_location, archiver.cache_path
+    corrupt(os.path.join(cache_path, "chunks"))
+    if archiver.FORK_DEFAULT:
+        out = cmd(archiver, f"--repo={repo_location}", "rinfo", exit_code=2)
+        assert "failed integrity check" in out
+    else:
+        with pytest.raises(FileIntegrityError):
+            cmd(archiver, f"--repo={repo_location}", "rinfo")
+
+
+def test_cache_files(archiver):
+    corrupt_archiver(archiver)
+    repo_location, cache_path = archiver.repository_location, archiver.cache_path
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    corrupt(os.path.join(cache_path, "files"))
+    out = cmd(archiver, f"--repo={repo_location}", "create", "test1", "input")
+    # borg warns about the corrupt files cache, but then continues without files cache.
+    assert "files cache is corrupted" in out
+
+
+def test_chunks_archive(archiver):
+    corrupt_archiver(archiver)
+    repo_location, cache_path = archiver.repository_location, archiver.cache_path
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", "input")
+    # Find ID of test1, so we can corrupt it later :)
+    target_id = cmd(archiver, f"--repo={repo_location}", "rlist", "--format={id}{NL}").strip()
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input")
+
+    # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    cmd(archiver, f"--repo={repo_location}", "rinfo", "--json")
+
+    chunks_archive = os.path.join(cache_path, "chunks.archive.d")
+    assert len(os.listdir(chunks_archive)) == 4  # two archives, one chunks cache and one .integrity file each
+
+    corrupt(os.path.join(chunks_archive, target_id + ".compact"))
+
+    # Trigger cache sync by changing the manifest ID in the cache config
+    config_path = os.path.join(cache_path, "config")
+    config = ConfigParser(interpolation=None)
+    config.read(config_path)
+    config.set("cache", "manifest", bin_to_hex(bytes(32)))
+    with open(config_path, "w") as fd:
+        config.write(fd)
+
+    # Cache sync notices corrupted archive chunks, but automatically recovers.
+    out = cmd(archiver, f"--repo={repo_location}", "create", "-v", "test3", "input", exit_code=1)
+    assert "Reading cached archive chunk index for test1" in out
+    assert "Cached archive chunk index of test1 is corrupted" in out
+    assert "Fetching and building archive index for test1" in out
+
+
+def test_old_version_interfered(archiver):
+    corrupt_archiver(archiver)
+    # Modify the main manifest ID without touching the manifest ID in the integrity section.
+    # This happens if a version without integrity checking modifies the cache.
+    repo_location, cache_path = archiver.repository_location, archiver.cache_path
+    config_path = os.path.join(cache_path, "config")
+    config = ConfigParser(interpolation=None)
+    config.read(config_path)
+    config.set("cache", "manifest", bin_to_hex(bytes(32)))
+    with open(config_path, "w") as fd:
+        config.write(fd)
+    out = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "Cache integrity data not available: old Borg version modified the cache." in out

+ 1094 - 946
src/borg/testsuite/archiver/create_cmd.py

@@ -1,13 +1,13 @@
 import errno
 import json
 import os
+import tempfile
 from random import randbytes
 import shutil
 import socket
 import stat
 import subprocess
 import time
-import unittest
 
 import pytest
 
@@ -28,956 +28,1104 @@ from .. import (
     is_root,
 )
 from . import (
-    ArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RemoteArchiverTestCaseBase,
-    RK_ENCRYPTION,
-    BORG_EXES,
+    cmd,
+    generate_archiver_tests,
+    create_test_files,
+    assert_dirs_equal,
+    create_regular_file,
     requires_hardlinks,
     requires_hardlinks,
+    _create_test_caches,
+    _create_test_tagged,
+    _create_test_keep_tagged,
+    _assert_test_caches,
+    _assert_test_tagged,
+    _assert_test_keep_tagged,
+    RK_ENCRYPTION,
 )
 
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_basic_functionality(self):
-        have_root = self.create_test_files()
-        # fork required to test show-rc output
-        output = self.cmd(
-            f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION, "--show-version", "--show-rc", fork=True
-        )
-        self.assert_in("borgbackup version", output)
-        self.assert_in("terminating with success status, rc 0", output)
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude-nodump", "test", "input")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--exclude-nodump", "--stats", "test.2", "input"
-        )
-        self.assert_in("Archive name: test.2", output)
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        list_output = self.cmd(f"--repo={self.repository_location}", "rlist", "--short")
-        self.assert_in("test", list_output)
-        self.assert_in("test.2", list_output)
-        expected = [
-            "input",
-            "input/bdev",
-            "input/cdev",
-            "input/dir2",
-            "input/dir2/file2",
-            "input/empty",
-            "input/file1",
-            "input/flagfile",
-        ]
-        if are_fifos_supported():
-            expected.append("input/fifo1")
-        if are_symlinks_supported():
-            expected.append("input/link1")
-        if are_hardlinks_supported():
-            expected.append("input/hardlink")
-        if not have_root:
-            # we could not create these device files without (fake)root
-            expected.remove("input/bdev")
-            expected.remove("input/cdev")
-        if has_lchflags:
-            # remove the file we did not backup, so input and output become equal
-            expected.remove("input/flagfile")  # this file is UF_NODUMP
-            os.remove(os.path.join("input", "flagfile"))
-        list_output = self.cmd(f"--repo={self.repository_location}", "list", "test", "--short")
-        for name in expected:
-            self.assert_in(name, list_output)
-        self.assert_dirs_equal("input", "output/input")
-        info_output = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-        item_count = 5 if has_lchflags else 6  # one file is UF_NODUMP
-        self.assert_in("Number of files: %d" % item_count, info_output)
-        shutil.rmtree(self.cache_path)
-        info_output2 = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-
-        def filter(output):
-            # filter for interesting "info" output, ignore cache rebuilding related stuff
-            prefixes = ["Name:", "Fingerprint:", "Number of files:", "This archive:", "All archives:", "Chunk index:"]
-            result = []
-            for line in output.splitlines():
-                for prefix in prefixes:
-                    if line.startswith(prefix):
-                        result.append(line)
-            return "\n".join(result)
-
-        # the interesting parts of info_output2 and info_output should be same
-        self.assert_equal(filter(info_output), filter(info_output2))
-
-    def test_archived_paths(self):
-        # As borg comes from the POSIX (Linux, UNIX) world, a lot of stuff assumes path separators
-        # to be slashes "/", e.g.: in archived items, for pattern matching.
-        # To make our lives easier and to support cross-platform extraction we always use slashes.
-        # Similarly, archived paths are expected to be full, but relative (have no leading slash).
-        full_path = os.path.abspath(os.path.join(self.input_path, "test"))
-        # remove colon from windows drive letter, if any:
-        posix_path = full_path.replace(":", "") if full_path[1] == ":" else full_path
-        # only needed on windows in case there are backslashes:
-        posix_path = posix_path.replace("\\", "/")
-        # no leading slash in borg archives:
-        archived_path = posix_path.lstrip("/")
-        self.create_regular_file("test")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", full_path)
-        # "input" directory is recursed into, "input/test" is discovered and joined by borg's recursion.
-        # full_path was directly given as a cli argument and should end up as archive_path in the borg archive.
-        expected_paths = sorted(["input", "input/test", archived_path])
-        # check path in archived items:
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--short")
-        assert expected_paths == sorted([path for path in archive_list.splitlines() if path])
-        # check path in archived items (json):
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        assert expected_paths == sorted([json.loads(line)["path"] for line in archive_list.splitlines() if line])
-
-    @requires_hardlinks
-    def test_create_duplicate_root(self):
-        # setup for #5603
-        path_a = os.path.join(self.input_path, "a")
-        path_b = os.path.join(self.input_path, "b")
-        os.mkdir(path_a)
-        os.mkdir(path_b)
-        hl_a = os.path.join(path_a, "hardlink")
-        hl_b = os.path.join(path_b, "hardlink")
-        self.create_regular_file(hl_a, contents=b"123456")
-        os.link(hl_a, hl_b)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "input")  # give input twice!
-        # test if created archive has 'input' contents twice:
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        # we have all fs items exactly once!
-        assert sorted(paths) == ["input", "input/a", "input/a/hardlink", "input/b", "input/b/hardlink"]
-
-    @pytest.mark.skipif(is_win32, reason="unix sockets not available on windows")
-    def test_unix_socket(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        try:
+
+def test_basic_functionality(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("test_basic_functionality seems incompatible with fakeroot and/or the binary.")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    have_root = create_test_files(input_path)
+
+    # fork required to test show-rc output
+    output = cmd(
+        archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION, "--show-version", "--show-rc", fork=True
+    )
+    assert "borgbackup version" in output
+    assert "terminating with success status, rc 0" in output
+
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "--stats", "test.2", "input")
+    assert "Archive name: test.2" in output
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+
+    list_output = cmd(archiver, f"--repo={repo_location}", "rlist", "--short")
+    assert "test" in list_output
+    assert "test.2" in list_output
+
+    expected = [
+        "input",
+        "input/bdev",
+        "input/cdev",
+        "input/dir2",
+        "input/dir2/file2",
+        "input/empty",
+        "input/file1",
+        "input/flagfile",
+    ]
+    if are_fifos_supported():
+        expected.append("input/fifo1")
+    if are_symlinks_supported():
+        expected.append("input/link1")
+    if are_hardlinks_supported():
+        expected.append("input/hardlink")
+    if not have_root:
+        # we could not create these device files without (fake)root
+        expected.remove("input/bdev")
+        expected.remove("input/cdev")
+    if has_lchflags:
+        # remove the file we did not back up, so input and output become equal
+        expected.remove("input/flagfile")  # this file is UF_NODUMP
+        os.remove(os.path.join("input", "flagfile"))
+
+    list_output = cmd(archiver, f"--repo={repo_location}", "list", "test", "--short")
+    for name in expected:
+        assert name in list_output
+    assert_dirs_equal("input", "output/input")
+
+    info_output = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+    item_count = 5 if has_lchflags else 6  # one file is UF_NODUMP
+    assert "Number of files: %d" % item_count in info_output
+    shutil.rmtree(archiver.cache_path)
+    info_output2 = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+
+    def filter(output):
+        # filter for interesting "info" output, ignore cache rebuilding related stuff
+        prefixes = ["Name:", "Fingerprint:", "Number of files:", "This archive:", "All archives:", "Chunk index:"]
+        result = []
+        for line in output.splitlines():
+            for prefix in prefixes:
+                if line.startswith(prefix):
+                    result.append(line)
+        return "\n".join(result)
+
+    # the interesting parts of info_output2 and info_output should be same
+    assert filter(info_output) == filter(info_output2)
+
+
+def test_archived_paths(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    # As borg comes from the POSIX (Linux, UNIX) world, a lot of stuff assumes path separators
+    # to be slashes "/", e.g.: in archived items, for pattern matching.
+    # To make our lives easier and to support cross-platform extraction we always use slashes.
+    # Similarly, archived paths are expected to be full, but relative (have no leading slash).
+    full_path = os.path.abspath(os.path.join(archiver.input_path, "test"))
+    # remove the Windows drive letter, if any:
+    posix_path = full_path[2:] if full_path[1] == ":" else full_path
+    # only needed on Windows in case there are backslashes:
+    posix_path = posix_path.replace("\\", "/")
+    # no leading slash in borg archives:
+    archived_path = posix_path.lstrip("/")
+    create_regular_file(archiver.input_path, "test")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", posix_path)
+    # "input" directory is recursed into, "input/test" is discovered and joined by borg's recursion.
+    # posix_path was directly given as a cli argument and should end up as archive_path in the borg archive.
+    expected_paths = sorted(["input", "input/test", archived_path])
+
+    # check path in archived items:
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--short")
+    assert expected_paths == sorted([path for path in archive_list.splitlines() if path])
+
+    # check path in archived items (json):
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    assert expected_paths == sorted([json.loads(line)["path"] for line in archive_list.splitlines() if line])
+
+
+@requires_hardlinks
+def test_create_duplicate_root(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    # setup for #5603
+    path_a = os.path.join(input_path, "a")
+    path_b = os.path.join(input_path, "b")
+    os.mkdir(path_a)
+    os.mkdir(path_b)
+    hl_a = os.path.join(path_a, "hardlink")
+    hl_b = os.path.join(path_b, "hardlink")
+    create_regular_file(input_path, hl_a, contents=b"123456")
+    os.link(hl_a, hl_b)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "input")  # give input twice!
+    # test if created archive has 'input' contents twice:
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    # we have all fs items exactly once!
+    assert sorted(paths) == ["input", "input/a", "input/a/hardlink", "input/b", "input/b/hardlink"]
+
+
+@pytest.mark.skipif(is_win32, reason="unix sockets not available on windows")
+def test_unix_socket(archivers, request, monkeypatch):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    try:
+        with tempfile.TemporaryDirectory() as temp_dir:
             sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            sock.bind(os.path.join(self.input_path, "unix-socket"))
-        except PermissionError as err:
-            if err.errno == errno.EPERM:
-                pytest.skip("unix sockets disabled or not supported")
-            elif err.errno == errno.EACCES:
-                pytest.skip("permission denied to create unix sockets")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        sock.close()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert not os.path.exists("input/unix-socket")
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    @pytest.mark.skipif(
-        not is_birthtime_fully_supported(), reason="cannot properly setup and execute test without birthtime"
+            sock.bind(os.path.join(temp_dir, "unix-socket"))
+    except PermissionError as err:
+        if err.errno == errno.EPERM:
+            pytest.skip("unix sockets disabled or not supported")
+        elif err.errno == errno.EACCES:
+            pytest.skip("permission denied to create unix sockets")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    sock.close()
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        print(f"{temp_dir}/unix-socket")
+        assert not os.path.exists(f"{temp_dir}/unix-socket")
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot setup and execute test without utime")
+@pytest.mark.skipif(not is_birthtime_fully_supported(), reason="cannot setup and execute test without birth time")
+def test_nobirthtime(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    birthtime, mtime, atime = 946598400, 946684800, 946771200
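+    # utime can only set mtime; setting an mtime older than the current birth time first drags
+    # st_birthtime back to it (on the filesystems this test runs on), then set the real mtime: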
+    os.utime("input/file1", (atime, birthtime))
+    os.utime("input/file1", (atime, mtime))
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--nobirthtime")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    sti = os.stat("input/file1")
+    sto = os.stat("output/input/file1")
+    assert same_ts_ns(sti.st_birthtime * 1e9, birthtime * 1e9)
+    assert same_ts_ns(sto.st_birthtime * 1e9, mtime * 1e9)
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+    assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
+
+
+def test_create_stdin(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = b"\x00foo\n\nbar\n   \n"
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "-", input=input_data)
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["size"] == len(input_data)
+    assert item["path"] == "stdin"
+    extracted_data = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--stdout", binary_output=True)
+    assert extracted_data == input_data
+
+
+def test_create_stdin_checkpointing(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = b"X" * (chunk_size * 2 - 1)  # one full and one partial chunk
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        f"--chunker-params=fixed,{chunk_size}",
+        f"--checkpoint-volume={chunk_size}",
+        "test",
+        "-",
+        input=input_data,
+    )
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # verify that there are no part files in final archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "stdin.borg_part" not in out
+    # verify full file
+    out = cmd(archiver, f"--repo={repo_location}", "extract", "test", "stdin", "--stdout", binary_output=True)
+    assert out == input_data
+
+
+def test_create_erroneous_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    chunk_size = 1000  # the "fail" chunker below reads and chunks with this fixed size
+    create_regular_file(input_path, os.path.join(input_path, "file1"), size=chunk_size * 2)
+    create_regular_file(input_path, os.path.join(input_path, "file2"), size=chunk_size * 2)
+    create_regular_file(input_path, os.path.join(input_path, "file3"), size=chunk_size * 2)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    flist = "".join(f"input/file{n}\n" for n in range(1, 4))
+    out = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        f"--chunker-params=fail,{chunk_size},rrrEEErrrr",
+        "--paths-from-stdin",
+        "--list",
+        "test",
+        input=flist.encode(),
+        exit_code=0,
+    )
+    assert "retry: 3 of " in out
+    assert "E input/file2" not in out  # we managed to read it in the 3rd retry (after 3 failed reads)
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # check files in created archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/file1" in out
+    assert "input/file2" in out
+    assert "input/file3" in out
+
+
+@pytest.mark.skipif(is_root(), reason="test must not be run as (fake)root")
+def test_create_no_permission_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    file_path = os.path.join(input_path, "file")
+    create_regular_file(input_path, file_path + "1", size=1000)
+    create_regular_file(input_path, file_path + "2", size=1000)
+    create_regular_file(input_path, file_path + "3", size=1000)
+    # revoke read permissions on file2 for everybody, including us:
+    if is_win32:
+        subprocess.run(["icacls.exe", file_path + "2", "/deny", "everyone:(R)"])
+    else:
+        # note: this will NOT take away read permissions for root
+        os.chmod(file_path + "2", 0o000)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    flist = "".join(f"input/file{n}\n" for n in range(1, 4))
+    out = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-stdin",
+        "--list",
+        "test",
+        input=flist.encode(),
+        exit_code=1,  # WARNING status: could not back up file2.
+    )
+    assert "retry: 1 of " not in out  # retries were NOT attempted!
+    assert "E input/file2" in out  # no permissions!
+    # repo looking good overall? checks for rc == 0.
+    cmd(archiver, f"--repo={repo_location}", "check", "--debug")
+    # check files in created archive
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/file1" in out
+    assert "input/file2" not in out  # it skipped file2
+    assert "input/file3" in out
+
+
+def test_sanitized_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--stdin-name", "./a//path", "test", "-", input=b"")
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["path"] == "a/path"
+
+
+def test_dotdot_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--stdin-name", "foo/../bar", "test", "-", input=b"", exit_code=2
+    )
+    assert output.endswith("'..' element in path 'foo/../bar'" + os.linesep)
+
+
+def test_dot_stdin_name(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--stdin-name", "./", "test", "-", input=b"", exit_code=2
+    )
+    assert output.endswith("'./' is not a valid file name" + os.linesep)
+
+
+def test_create_content_from_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    input_data = "some test content"
+    name = "a/b/c"
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--stdin-name",
+        name,
+        "--content-from-command",
+        "test",
+        "--",
+        "echo",
+        input_data,
+    )
+    item = json.loads(cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines"))
+    assert item["size"] == len(input_data) + 1  # `echo` adds newline
+    assert item["path"] == name
+    extracted_data = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--stdout")
+    assert extracted_data == input_data + "\n"
+
+
+def test_create_content_from_command_with_failed_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--content-from-command",
+        "test",
+        "--",
+        "sh",
+        "-c",
+        "exit 73;",
+        exit_code=2,
+    )
+    assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
+    archive_list = json.loads(cmd(archiver, f"--repo={repo_location}", "rlist", "--json"))
+    assert archive_list["archives"] == []
+
+
+def test_create_content_from_command_missing_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "--content-from-command", exit_code=2)
+    assert output.endswith("No command given." + os.linesep)
+
+
+def test_create_paths_from_stdin(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir1/file2", size=1024 * 80)
+    create_regular_file(input_path, "dir1/file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+
+    input_data = b"input/file1\0input/dir1\0input/file4"
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "--paths-from-stdin",
+        "--paths-delimiter",
+        "\\0",
+        input=input_data,
     )
-    def test_nobirthtime(self):
-        self.create_test_files()
-        birthtime, mtime, atime = 946598400, 946684800, 946771200
-        os.utime("input/file1", (atime, birthtime))
-        os.utime("input/file1", (atime, mtime))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--nobirthtime")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        sti = os.stat("input/file1")
-        sto = os.stat("output/input/file1")
-        assert same_ts_ns(sti.st_birthtime * 1e9, birthtime * 1e9)
-        assert same_ts_ns(sto.st_birthtime * 1e9, mtime * 1e9)
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-        assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
-
-    def test_create_stdin(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = b"\x00foo\n\nbar\n   \n"
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "-", input=input_data)
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["size"] == len(input_data)
-        assert item["path"] == "stdin"
-        extracted_data = self.cmd(
-            f"--repo={self.repository_location}", "extract", "test", "--stdout", binary_output=True
-        )
-        assert extracted_data == input_data
-
-    def test_create_stdin_checkpointing(self):
-        chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = b"X" * (chunk_size * 2 - 1)  # one full and one partial chunk
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            f"--chunker-params=fixed,{chunk_size}",
-            f"--checkpoint-volume={chunk_size}",
-            "test",
-            "-",
-            input=input_data,
-        )
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # verify that there are no part files in final archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "stdin.borg_part" not in out
-        # verify full file
-        out = self.cmd(f"--repo={self.repository_location}", "extract", "test", "stdin", "--stdout", binary_output=True)
-        assert out == input_data
-
-    def test_create_erroneous_file(self):
-        chunk_size = 1000  # fixed chunker with this size, also volume based checkpointing after that volume
-        self.create_regular_file(os.path.join(self.input_path, "file1"), size=chunk_size * 2)
-        self.create_regular_file(os.path.join(self.input_path, "file2"), size=chunk_size * 2)
-        self.create_regular_file(os.path.join(self.input_path, "file3"), size=chunk_size * 2)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        flist = "".join(f"input/file{n}\n" for n in range(1, 4))
-        out = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            f"--chunker-params=fail,{chunk_size},rrrEEErrrr",
-            "--paths-from-stdin",
-            "--list",
-            "test",
-            input=flist.encode(),
-            exit_code=0,
-        )
-        assert "retry: 3 of " in out
-        assert "E input/file2" not in out  # we managed to read it in the 3rd retry (after 3 failed reads)
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # check files in created archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/file1" in out
-        assert "input/file2" in out
-        assert "input/file3" in out
-
-    @pytest.mark.skipif(is_root(), reason="test must not be run as (fake)root")
-    def test_create_no_permission_file(self):
-        file_path = os.path.join(self.input_path, "file")
-        self.create_regular_file(file_path + "1", size=1000)
-        self.create_regular_file(file_path + "2", size=1000)
-        self.create_regular_file(file_path + "3", size=1000)
-        # revoke read permissions on file2 for everybody, including us:
-        if is_win32:
-            subprocess.run(["icacls.exe", file_path + "2", "/deny", "everyone:(R)"])
-        else:
-            # note: this will NOT take away read permissions for root
-            os.chmod(file_path + "2", 0o000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        flist = "".join(f"input/file{n}\n" for n in range(1, 4))
-        out = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-stdin",
-            "--list",
-            "test",
-            input=flist.encode(),
-            exit_code=1,  # WARNING status: could not back up file2.
-        )
-        assert "retry: 1 of " not in out  # retries were NOT attempted!
-        assert "E input/file2" in out  # no permissions!
-        # repo looking good overall? checks for rc == 0.
-        self.cmd(f"--repo={self.repository_location}", "check", "--debug")
-        # check files in created archive
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/file1" in out
-        assert "input/file2" not in out  # it skipped file2
-        assert "input/file3" in out
-
-    def test_sanitized_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--stdin-name", "./a//path", "test", "-", input=b"")
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["path"] == "a/path"
-
-    def test_dotdot_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--stdin-name",
-            "foo/../bar",
-            "test",
-            "-",
-            input=b"",
-            exit_code=2,
-        )
-        assert output.endswith("'..' element in path 'foo/../bar'" + os.linesep)
-
-    def test_dot_stdin_name(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--stdin-name", "./", "test", "-", input=b"", exit_code=2
-        )
-        assert output.endswith("'./' is not a valid file name" + os.linesep)
-
-    def test_create_content_from_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        input_data = "some test content"
-        name = "a/b/c"
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--stdin-name",
-            name,
-            "--content-from-command",
-            "test",
-            "--",
-            "echo",
-            input_data,
-        )
-        item = json.loads(self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines"))
-        assert item["size"] == len(input_data) + 1  # `echo` adds newline
-        assert item["path"] == name
-        extracted_data = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--stdout")
-        assert extracted_data == input_data + "\n"
-
-    def test_create_content_from_command_with_failed_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--content-from-command",
-            "test",
-            "--",
-            "sh",
-            "-c",
-            "exit 73;",
-            exit_code=2,
-        )
-        assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
-        archive_list = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
-        assert archive_list["archives"] == []
-
-    def test_create_content_from_command_missing_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "--content-from-command", exit_code=2)
-        assert output.endswith("No command given." + os.linesep)
-
-    def test_create_paths_from_stdin(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir1/file2", size=1024 * 80)
-        self.create_regular_file("dir1/file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-
-        input_data = b"input/file1\0input/dir1\0input/file4"
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "--paths-from-stdin",
-            "--paths-delimiter",
-            "\\0",
-            input=input_data,
-        )
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        assert paths == ["input/file1", "input/dir1", "input/file4"]
-
-    def test_create_paths_from_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-
-        input_data = "input/file1\ninput/file2\ninput/file3"
-        if is_win32:
-            with open("filenames.cmd", "w") as script:
-                for filename in input_data.splitlines():
-                    script.write(f"@echo {filename}\n")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-command",
-            "test",
-            "--",
-            "filenames.cmd" if is_win32 else "echo",
-            input_data,
-        )
-
-        archive_list = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
-        assert paths == ["input/file1", "input/file2", "input/file3"]
-
-    def test_create_paths_from_command_with_failed_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--paths-from-command",
-            "test",
-            "--",
-            "sh",
-            "-c",
-            "exit 73;",
-            exit_code=2,
-        )
-        assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
-        archive_list = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
-        assert archive_list["archives"] == []
-
-    def test_create_paths_from_command_missing_command(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "--paths-from-command", exit_code=2)
-        assert output.endswith("No command given." + os.linesep)
-
-    def test_create_without_root(self):
-        """test create without a root"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", exit_code=2)
-
-    def test_create_pattern_root(self):
-        """test create with only a root pattern"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "-v", "--list", "--pattern=R input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-
-    def test_create_pattern(self):
-        """test file patterns during create"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file_important", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--pattern=+input/file_important",
-            "--pattern=-input/file*",
-            "test",
-            "input",
-        )
-        self.assert_in("A input/file_important", output)
-        self.assert_in("- input/file1", output)
-        self.assert_in("- input/file2", output)
-
-    def test_create_pattern_file(self):
-        """test file patterns during create"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("otherfile", size=1024 * 80)
-        self.create_regular_file("file_important", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--pattern=-input/otherfile",
-            "--patterns-from=" + self.patterns_file_path,
-            "test",
-            "input",
-        )
-        self.assert_in("A input/file_important", output)
-        self.assert_in("- input/file1", output)
-        self.assert_in("- input/file2", output)
-        self.assert_in("- input/otherfile", output)
-
-    def test_create_pattern_exclude_folder_but_recurse(self):
-        """test when patterns exclude a parent folder, but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/b\n- input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        self.create_regular_file("y/foo_y", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--patterns-from=" + self.patterns_file_path2,
-            "test",
-            "input",
-        )
-        self.assert_in("- input/x/a/foo_a", output)
-        self.assert_in("A input/x/b/foo_b", output)
-        self.assert_in("A input/y/foo_y", output)
-
-    def test_create_pattern_exclude_folder_no_recurse(self):
-        """test when patterns exclude a parent folder and, but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/b\n! input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        self.create_regular_file("y/foo_y", size=1024 * 80)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "-v",
-            "--list",
-            "--patterns-from=" + self.patterns_file_path2,
-            "test",
-            "input",
-        )
-        self.assert_not_in("input/x/a/foo_a", output)
-        self.assert_not_in("input/x/a", output)
-        self.assert_in("A input/y/foo_y", output)
-
-    def test_create_pattern_intermediate_folders_first(self):
-        """test that intermediate folders appear first when patterns exclude a parent folder but include a child"""
-        self.patterns_file_path2 = os.path.join(self.tmpdir, "patterns2")
-        with open(self.patterns_file_path2, "wb") as fd:
-            fd.write(b"+ input/x/a\n+ input/x/b\n- input/x*\n")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        self.create_regular_file("x/a/foo_a", size=1024 * 80)
-        self.create_regular_file("x/b/foo_b", size=1024 * 80)
-        with changedir("input"):
-            self.cmd(
-                f"--repo={self.repository_location}",
-                "create",
-                "--patterns-from=" + self.patterns_file_path2,
-                "test",
-                ".",
-            )
-
-        # list the archive and verify that the "intermediate" folders appear before
-        # their contents
-        out = self.cmd(f"--repo={self.repository_location}", "list", "test", "--format", "{type} {path}{NL}")
-        out_list = out.splitlines()
-
-        self.assert_in("d x/a", out_list)
-        self.assert_in("d x/b", out_list)
-
-        assert out_list.index("d x/a") < out_list.index("- x/a/foo_a")
-        assert out_list.index("d x/b") < out_list.index("- x/b/foo_b")
-
-    def test_create_no_cache_sync(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        create_json = json.loads(
-            self.cmd(
-                f"--repo={self.repository_location}", "create", "--no-cache-sync", "--json", "--error", "test", "input"
-            )
-        )  # ignore experimental warning
-        info_json = json.loads(self.cmd(f"--repo={self.repository_location}", "info", "-a", "test", "--json"))
-        create_stats = create_json["cache"]["stats"]
-        info_stats = info_json["cache"]["stats"]
-        assert create_stats == info_stats
-        self.cmd(f"--repo={self.repository_location}", "rdelete", "--cache-only")
-        self.cmd(f"--repo={self.repository_location}", "create", "--no-cache-sync", "test2", "input")
-        self.cmd(f"--repo={self.repository_location}", "rinfo")
-        self.cmd(f"--repo={self.repository_location}", "check")
-
-    def test_create_archivename_with_placeholder(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        ts = "1999-12-31T23:59:59"
-        name_given = "test-{now}"  # placeholder in archive name gets replaced by borg
-        name_expected = f"test-{ts}"  # placeholder in f-string gets replaced by python
-        self.cmd(f"--repo={self.repository_location}", "create", f"--timestamp={ts}", name_given, "input")
-        list_output = self.cmd(f"--repo={self.repository_location}", "rlist", "--short")
-        assert name_expected in list_output
-
-    def test_exclude_caches(self):
-        self._create_test_caches()
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--exclude-caches")
-        self._assert_test_caches()
-
-    def test_exclude_tagged(self):
-        self._create_test_tagged()
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "input",
-            "--exclude-if-present",
-            ".NOBACKUP",
-            "--exclude-if-present",
-            "00-NOBACKUP",
-        )
-        self._assert_test_tagged()
-
-    def test_exclude_keep_tagged(self):
-        self._create_test_keep_tagged()
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test",
-            "input",
-            "--exclude-if-present",
-            ".NOBACKUP1",
-            "--exclude-if-present",
-            ".NOBACKUP2",
-            "--exclude-caches",
-            "--keep-exclude-tags",
-        )
-        self._assert_test_keep_tagged()
-
-    def test_path_sanitation(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("dir1/dir2/file", size=1024 * 80)
-        with changedir("input/dir1/dir2"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test", "../../../input/dir1/../dir1/dir2/..")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        self.assert_not_in("..", output)
-        self.assert_in(" input/dir1/dir2/file", output)
-
-    def test_exclude_sanitation(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        with changedir("input"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test1", ".", "--exclude=file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test1")
-        self.assert_equal(sorted(os.listdir("output")), ["file2"])
-        with changedir("input"):
-            self.cmd(f"--repo={self.repository_location}", "create", "test2", ".", "--exclude=./file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test2")
-        self.assert_equal(sorted(os.listdir("output")), ["file2"])
-        self.cmd(f"--repo={self.repository_location}", "create", "test3", "input", "--exclude=input/./file1")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test3")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file2"])
-
-    def test_repeated_files(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "input")
-
-    @pytest.mark.skipif("BORG_TESTS_IGNORE_MODES" in os.environ, reason="modes unreliable")
-    @pytest.mark.skipif(is_win32, reason="modes unavailable on Windows")
-    def test_umask(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        mode = os.stat(self.repository_path).st_mode
-        self.assertEqual(stat.S_IMODE(mode), 0o700)
-
-    def test_create_dry_run(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--dry-run", "test", "input")
-        # Make sure no archive has been created
-        with Repository(self.repository_path) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-        self.assert_equal(len(manifest.archives), 0)
-
-    def test_progress_on(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test4", "input", "--progress")
-        self.assert_in("\r", output)
-
-    def test_progress_off(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test5", "input")
-        self.assert_not_in("\r", output)
-
-    def test_file_status(self):
-        """test that various file status show expected results
-
-        clearly incomplete: only tests for the weird "unchanged" status for now"""
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "test", "input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-        # should find first file as unmodified
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "test2", "input")
-        self.assert_in("U input/file1", output)
-        # this is expected, although surprising, for why, see:
-        # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file
-        self.assert_in("A input/file2", output)
-
-    @pytest.mark.skipif(
-        is_win32, reason="ctime attribute is file creation time on Windows"
-    )  # see https://docs.python.org/3/library/os.html#os.stat_result.st_ctime
-    def test_file_status_cs_cache_mode(self):
-        """test that a changed file with faked "previous" mtime still gets backed up in ctime,size cache_mode"""
-        self.create_regular_file("file1", contents=b"123")
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "test1", "input", "--list", "--files-cache=ctime,size"
-        )
-        # modify file1, but cheat with the mtime (and atime) and also keep same size:
-        st = os.stat("input/file1")
-        self.create_regular_file("file1", contents=b"321")
-        os.utime("input/file1", ns=(st.st_atime_ns, st.st_mtime_ns))
-        # this mode uses ctime for change detection, so it should find file1 as modified
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "test2", "input", "--list", "--files-cache=ctime,size"
-        )
-        self.assert_in("M input/file1", output)
-
-    def test_file_status_ms_cache_mode(self):
-        """test that a chmod'ed file with no content changes does not get chunked again in mtime,size cache_mode"""
-        self.create_regular_file("file1", size=10)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=mtime,size", "test1", "input"
-        )
-        # change mode of file1, no content change:
-        st = os.stat("input/file1")
-        os.chmod("input/file1", st.st_mode ^ stat.S_IRWXO)  # this triggers a ctime change, but mtime is unchanged
-        # this mode uses mtime for change detection, so it should find file1 as unmodified
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=mtime,size", "test2", "input"
-        )
-        self.assert_in("U input/file1", output)
-
-    def test_file_status_rc_cache_mode(self):
-        """test that files get rechunked unconditionally in rechunk,ctime cache mode"""
-        self.create_regular_file("file1", size=10)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=10)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=rechunk,ctime", "test1", "input"
-        )
-        # no changes here, but this mode rechunks unconditionally
-        output = self.cmd(
-            f"--repo={self.repository_location}", "create", "--list", "--files-cache=rechunk,ctime", "test2", "input"
-        )
-        self.assert_in("A input/file1", output)
-
-    def test_file_status_excluded(self):
-        """test that excluded paths are listed"""
-
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        if has_lchflags:
-            self.create_regular_file("file3", size=1024 * 80)
-            platform.set_flags(os.path.join(self.input_path, "file3"), stat.UF_NODUMP)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "create", "--list", "--exclude-nodump", "test", "input")
-        self.assert_in("A input/file1", output)
-        self.assert_in("A input/file2", output)
-        if has_lchflags:
-            self.assert_in("- input/file3", output)
-        # should find second file as excluded
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "test1",
-            "input",
-            "--list",
-            "--exclude-nodump",
-            "--exclude",
-            "*/file2",
-        )
-        self.assert_in("U input/file1", output)
-        self.assert_in("- input/file2", output)
-        if has_lchflags:
-            self.assert_in("- input/file3", output)
-
-    def test_file_status_counters(self):
-        """Test file status counters in the stats of `borg create --stats`"""
-
-        def to_dict(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            borg_create_output = {
-                key: int(value)
-                for key, value in borg_create_output
-                if key in ("Added files", "Unchanged files", "Modified files")
-            }
-            return borg_create_output
-
-        # Test case set up: create a repository
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # Archive an empty dir
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        result = to_dict(result)
-        assert result["Added files"] == 0
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 0
-        # Archive a dir with two added files
-        self.create_regular_file("testfile1", contents=b"test1")
-        time.sleep(1.0 if is_darwin else 0.01)  # testfile2 must have newer timestamps than testfile1
-        self.create_regular_file("testfile2", contents=b"test2")
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive2", self.input_path)
-        result = to_dict(result)
-        assert result["Added files"] == 2
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 0
-        # Archive a dir with 1 unmodified file and 1 modified
-        self.create_regular_file("testfile1", contents=b"new data")
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive3", self.input_path)
-        result = to_dict(result)
-        # Should process testfile2 as added because of
-        # https://borgbackup.readthedocs.io/en/stable/faq.html#i-am-seeing-a-added-status-for-an-unchanged-file
-        assert result["Added files"] == 1
-        assert result["Unchanged files"] == 0
-        assert result["Modified files"] == 1
-
-    def test_create_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        create_info = json.loads(self.cmd(f"--repo={self.repository_location}", "create", "--json", "test", "input"))
-        # The usual keys
-        assert "encryption" in create_info
-        assert "repository" in create_info
-        assert "cache" in create_info
-        assert "last_modified" in create_info["repository"]
-
-        archive = create_info["archive"]
-        assert archive["name"] == "test"
-        assert isinstance(archive["command_line"], str)
-        assert isinstance(archive["duration"], float)
-        assert len(archive["id"]) == 64
-        assert "stats" in archive
-
-    def test_create_topical(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        time.sleep(1)  # file2 must have newer timestamps than file1
-        self.create_regular_file("file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # no listing by default
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.assert_not_in("file1", output)
-        # shouldn't be listed even if unchanged
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-        self.assert_not_in("file1", output)
-        # should list the file as unchanged
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test1", "input", "--list", "--filter=U")
-        self.assert_in("file1", output)
-        # should *not* list the file as changed
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test2", "input", "--list", "--filter=AM")
-        self.assert_not_in("file1", output)
-        # change the file
-        self.create_regular_file("file1", size=1024 * 100)
-        # should list the file as changed
-        output = self.cmd(f"--repo={self.repository_location}", "create", "test3", "input", "--list", "--filter=AM")
-        self.assert_in("file1", output)
-
-    @pytest.mark.skipif(not are_fifos_supported() or is_cygwin, reason="FIFOs not supported, hangs on cygwin")
-    def test_create_read_special_symlink(self):
-        from threading import Thread
-
-        def fifo_feeder(fifo_fn, data):
-            fd = os.open(fifo_fn, os.O_WRONLY)
-            try:
-                os.write(fd, data)
-            finally:
-                os.close(fd)
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        data = b"foobar" * 1000
-
-        fifo_fn = os.path.join(self.input_path, "fifo")
-        link_fn = os.path.join(self.input_path, "link_fifo")
-        os.mkfifo(fifo_fn)
-        os.symlink(fifo_fn, link_fn)
-
-        t = Thread(target=fifo_feeder, args=(fifo_fn, data))
-        t.start()
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    assert paths == ["input/file1", "input/dir1", "input/file4"]
+
+
+def test_create_paths_from_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+
+    input_data = "input/file1\ninput/file2\ninput/file3"
+    if is_win32:
+        with open("filenames.cmd", "w") as script:
+            for filename in input_data.splitlines():
+                script.write(f"@echo {filename}\n")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-command",
+        "test",
+        "--",
+        "filenames.cmd" if is_win32 else "echo",
+        input_data,
+    )
+
+    archive_list = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    paths = [json.loads(line)["path"] for line in archive_list.split("\n") if line]
+    assert paths == ["input/file1", "input/file2", "input/file3"]
+
+
+def test_create_paths_from_command_with_failed_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "--paths-from-command",
+        "test",
+        "--",
+        "sh",
+        "-c",
+        "exit 73;",
+        exit_code=2,
+    )
+    assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
+    archive_list = json.loads(cmd(archiver, f"--repo={repo_location}", "rlist", "--json"))
+    assert archive_list["archives"] == []
+
+
+def test_create_paths_from_command_missing_command(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "--paths-from-command", exit_code=2)
+    assert output.endswith("No command given." + os.linesep)
+
+
+def test_create_without_root(archivers, request):
+    """test create without a root"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", exit_code=2)
+
+
+def test_create_pattern_root(archivers, request):
+    """test create with only a root pattern"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "-v", "--list", "--pattern=R input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+
+
+def test_create_pattern(archivers, request):
+    """test file patterns during create"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file_important", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--pattern=+input/file_important",
+        "--pattern=-input/file*",
+        "test",
+        "input",
+    )
+    assert "A input/file_important" in output
+    assert "- input/file1" in output
+    assert "- input/file2" in output
+
+
+def test_create_pattern_file(archivers, request):
+    """test file patterns during create"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "otherfile", size=1024 * 80)
+    create_regular_file(input_path, "file_important", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--pattern=-input/otherfile",
+        "--patterns-from=" + archiver.patterns_file_path,
+        "test",
+        "input",
+    )
+    assert "A input/file_important" in output
+    assert "- input/file1" in output
+    assert "- input/file2" in output
+    assert "- input/otherfile" in output
+
+
+def test_create_pattern_exclude_folder_but_recurse(archivers, request):
+    """test when patterns exclude a parent folder, but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/b\n- input/x*\n")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    create_regular_file(input_path, "y/foo_y", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--patterns-from=" + patterns_file_path2,
+        "test",
+        "input",
+    )
+    assert "- input/x/a/foo_a" in output
+    assert "A input/x/b/foo_b" in output
+    assert "A input/y/foo_y" in output
+
+
+def test_create_pattern_exclude_folder_no_recurse(archivers, request):
+    """test when patterns exclude a parent folder, but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/b\n! input/x*\n")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    create_regular_file(input_path, "y/foo_y", size=1024 * 80)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "-v",
+        "--list",
+        "--patterns-from=" + patterns_file_path2,
+        "test",
+        "input",
+    )
+    assert "input/x/a/foo_a" not in output
+    assert "input/x/a" not in output
+    assert "A input/y/foo_y" in output
+
+
+def test_create_pattern_intermediate_folders_first(archivers, request):
+    """test that intermediate folders appear first when patterns exclude a parent folder but include a child"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    patterns_file_path2 = os.path.join(archiver.tmpdir, "patterns2")
+    with open(patterns_file_path2, "wb") as fd:
+        fd.write(b"+ input/x/a\n+ input/x/b\n- input/x*\n")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+
+    create_regular_file(input_path, "x/a/foo_a", size=1024 * 80)
+    create_regular_file(input_path, "x/b/foo_b", size=1024 * 80)
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "--patterns-from=" + patterns_file_path2, "test", ".")
+
+    # list the archive and verify that the "intermediate" folders appear before
+    # their contents
+    out = cmd(archiver, f"--repo={repo_location}", "list", "test", "--format", "{type} {path}{NL}")
+    out_list = out.splitlines()
+
+    assert "d x/a" in out_list
+    assert "d x/b" in out_list
+    assert out_list.index("d x/a") < out_list.index("- x/a/foo_a")
+    assert out_list.index("d x/b") < out_list.index("- x/b/foo_b")
+
+
+def test_create_no_cache_sync(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    create_json = json.loads(
+        cmd(archiver, f"--repo={repo_location}", "create", "--no-cache-sync", "--json", "--error", "test", "input")
+    )  # ignore experimental warning
+    info_json = json.loads(cmd(archiver, f"--repo={repo_location}", "info", "-a", "test", "--json"))
+    create_stats = create_json["cache"]["stats"]
+    info_stats = info_json["cache"]["stats"]
+    assert create_stats == info_stats
+    cmd(archiver, f"--repo={repo_location}", "rdelete", "--cache-only")
+    cmd(archiver, f"--repo={repo_location}", "create", "--no-cache-sync", "test2", "input")
+    cmd(archiver, f"--repo={repo_location}", "rinfo")
+    cmd(archiver, f"--repo={repo_location}", "check")
+
+
+def test_create_archivename_with_placeholder(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    ts = "1999-12-31T23:59:59"
+    name_given = "test-{now}"  # placeholder in archive name gets replaced by borg
+    name_expected = f"test-{ts}"  # placeholder in f-string gets replaced by python
+    cmd(archiver, f"--repo={repo_location}", "create", f"--timestamp={ts}", name_given, "input")
+    list_output = cmd(archiver, f"--repo={repo_location}", "rlist", "--short")
+    assert name_expected in list_output
+
+
+def test_exclude_caches(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    _create_test_caches(archiver)
+    cmd(archiver, f"--repo={archiver.repository_location}", "create", "test", "input", "--exclude-caches")
+    _assert_test_caches(archiver)
+
+
+def test_exclude_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_tagged(archiver)
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "input",
+        "--exclude-if-present",
+        ".NOBACKUP",
+        "--exclude-if-present",
+        "00-NOBACKUP",
+    )
+    _assert_test_tagged(archiver)
+
+
+def test_exclude_keep_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_keep_tagged(archiver)
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test",
+        "input",
+        "--exclude-if-present",
+        ".NOBACKUP1",
+        "--exclude-if-present",
+        ".NOBACKUP2",
+        "--exclude-caches",
+        "--keep-exclude-tags",
+    )
+    _assert_test_keep_tagged(archiver)
+
+
+def test_path_sanitation(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "dir1/dir2/file", size=1024 * 80)
+    with changedir("input/dir1/dir2"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test", "../../../input/dir1/../dir1/dir2/..")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert ".." not in output
+    assert " input/dir1/dir2/file" in output
+
+
+def test_exclude_sanitation(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test1", ".", "--exclude=file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test1")
+    assert sorted(os.listdir("output")) == ["file2"]
+    with changedir("input"):
+        cmd(archiver, f"--repo={repo_location}", "create", "test2", ".", "--exclude=./file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test2")
+    assert sorted(os.listdir("output")) == ["file2"]
+    cmd(archiver, f"--repo={repo_location}", "create", "test3", "input", "--exclude=input/./file1")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test3")
+    assert sorted(os.listdir("output/input")) == ["file2"]
+
+
+def test_repeated_files(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "input")
+
+
+@pytest.mark.skipif("BORG_TESTS_IGNORE_MODES" in os.environ, reason="modes unreliable")
+@pytest.mark.skipif(is_win32, reason="modes unavailable on Windows")
+def test_umask(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    mode = os.stat(repo_path).st_mode
+    assert stat.S_IMODE(mode) == 0o700
+
+
+def test_create_dry_run(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--dry-run", "test", "input")
+    # Make sure no archive has been created
+    with Repository(repo_path) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+    assert len(manifest.archives) == 0
+
+
+def test_progress_on(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test4", "input", "--progress")
+    assert "\r" in output
+
+
+def test_progress_off(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test5", "input")
+    assert "\r" not in output
+
+
+def test_file_status(archivers, request):
+    """test that various file status show expected results
+    clearly incomplete: only tests for the weird "unchanged" status for now"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "test", "input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+    # should find first file as unmodified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "test2", "input")
+    assert "U input/file1" in output
+    # although surprising, this is expected. For why, see:
+    # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file
+    assert "A input/file2" in output
+
+
+@pytest.mark.skipif(
+    is_win32, reason="ctime attribute is file creation time on Windows"
+)  # see https://docs.python.org/3/library/os.html#os.stat_result.st_ctime
+def test_file_status_cs_cache_mode(archivers, request):
+    """test that a changed file with faked "previous" mtime still gets backed up in ctime,size cache_mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", contents=b"123")
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test1", "input", "--list", "--files-cache=ctime,size")
+    # modify file1, but cheat with the mtime (and atime) and also keep same size:
+    st = os.stat("input/file1")
+    create_regular_file(input_path, "file1", contents=b"321")
+    os.utime("input/file1", ns=(st.st_atime_ns, st.st_mtime_ns))
+    # this mode uses ctime for change detection, so it should find file1 as modified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--list", "--files-cache=ctime,size")
+    assert "M input/file1" in output
+
+
+def test_file_status_ms_cache_mode(archivers, request):
+    """test that a chmod'ed file with no content changes does not get chunked again in mtime,size cache_mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=10)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=mtime,size", "test1", "input")
+    # change mode of file1, no content change:
+    st = os.stat("input/file1")
+    os.chmod("input/file1", st.st_mode ^ stat.S_IRWXO)  # this triggers a ctime change, but mtime is unchanged
+    # this mode uses mtime for change detection, so it should find file1 as unmodified
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=mtime,size", "test2", "input")
+    assert "U input/file1" in output
+
+
+def test_file_status_rc_cache_mode(archivers, request):
+    """test that files get rechunked unconditionally in rechunk,ctime cache mode"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=10)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=10)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=rechunk,ctime", "test1", "input"
+    )
+    # no changes here, but this mode rechunks unconditionally
+    output = cmd(
+        archiver, f"--repo={repo_location}", "create", "--list", "--files-cache=rechunk,ctime", "test2", "input"
+    )
+    assert "A input/file1" in output
+
+
+def test_file_status_excluded(archivers, request):
+    """test that excluded paths are listed"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    if has_lchflags:
+        create_regular_file(input_path, "file3", size=1024 * 80)
+        platform.set_flags(os.path.join(input_path, "file3"), stat.UF_NODUMP)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "create", "--list", "--exclude-nodump", "test", "input")
+    assert "A input/file1" in output
+    assert "A input/file2" in output
+    if has_lchflags:
+        assert "- input/file3" in output
+    # should find second file as excluded
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "create",
+        "test1",
+        "input",
+        "--list",
+        "--exclude-nodump",
+        "--exclude",
+        "*/file2",
+    )
+    assert "U input/file1" in output
+    assert "- input/file2" in output
+    if has_lchflags:
+        assert "- input/file3" in output
+
+
+def test_file_status_counters(archivers, request):
+    """Test file status counters in the stats of `borg create --stats`"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def to_dict(borg_create_output):
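+        # reduce --stats output lines like "Added files: 2" to a dict of the three file counters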
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        borg_create_output = {
+            key: int(value)
+            for key, value in borg_create_output
+            if key in ("Added files", "Unchanged files", "Modified files")
+        }
+        return borg_create_output
+
+    # Test case set up: create a repository
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # Archive an empty dir
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    result = to_dict(result)
+    assert result["Added files"] == 0
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 0
+    # Archive a dir with two added files
+    create_regular_file(input_path, "testfile1", contents=b"test1")
+    time.sleep(1.0 if is_darwin else 0.01)  # testfile2 must have newer timestamps than testfile1
+    create_regular_file(input_path, "testfile2", contents=b"test2")
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive2", input_path)
+    result = to_dict(result)
+    assert result["Added files"] == 2
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 0
+    # Archive a dir with 1 unmodified file and 1 modified
+    create_regular_file(input_path, "testfile1", contents=b"new data")
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive3", input_path)
+    result = to_dict(result)
+    # Should process testfile2 as added because of
+    # https://borgbackup.readthedocs.io/en/stable/faq.html#i-am-seeing-a-added-status-for-an-unchanged-file
+    assert result["Added files"] == 1
+    assert result["Unchanged files"] == 0
+    assert result["Modified files"] == 1
+
+
+def test_create_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_info = json.loads(cmd(archiver, f"--repo={repo_location}", "create", "--json", "test", "input"))
+    # The usual keys
+    assert "encryption" in create_info
+    assert "repository" in create_info
+    assert "cache" in create_info
+    assert "last_modified" in create_info["repository"]
+
+    archive = create_info["archive"]
+    assert archive["name"] == "test"
+    assert isinstance(archive["command_line"], str)
+    assert isinstance(archive["duration"], float)
+    assert len(archive["id"]) == 64
+    assert "stats" in archive
+
+
+def test_create_topical(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    time.sleep(1)  # file2 must have newer timestamps than file1
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # no listing by default
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    assert "file1" not in output
+    # shouldn't be listed even if unchanged
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    assert "file1" not in output
+    # should list the file as unchanged
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test1", "input", "--list", "--filter=U")
+    assert "file1" in output
+    # should *not* list the file as changed
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--list", "--filter=AM")
+    assert "file1" not in output
+    # change the file
+    create_regular_file(input_path, "file1", size=1024 * 100)
+    # should list the file as changed
+    output = cmd(archiver, f"--repo={repo_location}", "create", "test3", "input", "--list", "--filter=AM")
+    assert "file1" in output
+
+
+@pytest.mark.skipif(not are_fifos_supported() or is_cygwin, reason="FIFOs not supported, hangs on cygwin")
+def test_create_read_special_symlink(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    from threading import Thread
+
+    def fifo_feeder(fifo_fn, data):
+        fd = os.open(fifo_fn, os.O_WRONLY)
+        try:
+            os.write(fd, data)
+        finally:
+            os.close(fd)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    data = b"foobar" * 1000
+
+    fifo_fn = os.path.join(input_path, "fifo")
+    link_fn = os.path.join(input_path, "link_fifo")
+    os.mkfifo(fifo_fn)
+    os.symlink(fifo_fn, link_fn)
+
+    t = Thread(target=fifo_feeder, args=(fifo_fn, data))
+    t.start()
+    try:
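+        # --read-special must follow the symlink and archive the data streamed through the FIFO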
+        cmd(archiver, f"--repo={repo_location}", "create", "--read-special", "test", "input/link_fifo")
+    finally:
+        # In case `borg create` failed to open FIFO, read all data to avoid join() hanging.
+        fd = os.open(fifo_fn, os.O_RDONLY | os.O_NONBLOCK)
         try:
-            self.cmd(f"--repo={self.repository_location}", "create", "--read-special", "test", "input/link_fifo")
+            os.read(fd, len(data))
+        except OSError:
+            # fails on FreeBSD 13 with BlockingIOError
+            pass
         finally:
-            # In case `borg create` failed to open FIFO, read all data to avoid join() hanging.
-            fd = os.open(fifo_fn, os.O_RDONLY | os.O_NONBLOCK)
-            try:
-                os.read(fd, len(data))
-            except OSError:
-                # fails on FreeBSD 13 with BlockingIOError
-                pass
-            finally:
-                os.close(fd)
-            t.join()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            fifo_fn = "input/link_fifo"
-            with open(fifo_fn, "rb") as f:
-                extracted_data = f.read()
-        assert extracted_data == data
-
-    @pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
-    def test_create_read_special_broken_symlink(self):
-        os.symlink("somewhere does not exist", os.path.join(self.input_path, "link"))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--read-special", "test", "input")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert "input/link -> somewhere does not exist" in output
-
-    def test_log_json(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        log = self.cmd(
-            f"--repo={self.repository_location}", "create", "test", "input", "--log-json", "--list", "--debug"
-        )
-        messages = {}  # type -> message, one of each kind
-        for line in log.splitlines():
-            msg = json.loads(line)
-            messages[msg["type"]] = msg
-
-        file_status = messages["file_status"]
-        assert "status" in file_status
-        assert file_status["path"].startswith("input")
-
-        log_message = messages["log_message"]
-        assert isinstance(log_message["time"], float)
-        assert log_message["levelname"] == "DEBUG"  # there should only be DEBUG messages
-        assert isinstance(log_message["message"], str)
-
-    def test_common_options(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        log = self.cmd(f"--repo={self.repository_location}", "--debug", "create", "test", "input")
-        assert "security: read previous location" in log
-
-    def test_hashing_time(self):
-        def extract_hashing_time(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            hashing_time = [line for line in borg_create_output if line[0] == "Time spent in hashing"].pop()
-            hashing_time = hashing_time[1]
-            hashing_time = float(hashing_time.removesuffix(" seconds"))
-            return hashing_time
-
-        # Test case set up: create a repository and a file
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.create_regular_file("testfile", contents=randbytes(50000000))
-        # Archive
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        hashing_time = extract_hashing_time(result)
-
-        assert hashing_time > 0.0
-
-    def test_chunking_time(self):
-        def extract_chunking_time(borg_create_output):
-            borg_create_output = borg_create_output.strip().splitlines()
-            borg_create_output = [line.split(":", 1) for line in borg_create_output]
-            chunking_time = [line for line in borg_create_output if line[0] == "Time spent in chunking"].pop()
-            chunking_time = chunking_time[1]
-            chunking_time = float(chunking_time.removesuffix(" seconds"))
-            return chunking_time
-
-        # Test case set up: create a repository and a file
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("testfile", contents=randbytes(50000000))
-        # Archive
-        result = self.cmd(f"--repo={self.repository_location}", "create", "--stats", "test_archive", self.input_path)
-        chunking_time = extract_chunking_time(result)
-
-        assert chunking_time > 0.0
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
-
-    @unittest.skip("test_basic_functionality seems incompatible with fakeroot and/or the binary.")
-    def test_basic_functionality(self):
-        pass
+            os.close(fd)
+        t.join()
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        fifo_fn = "input/link_fifo"
+        with open(fifo_fn, "rb") as f:
+            extracted_data = f.read()
+    assert extracted_data == data
+
+
+@pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
+def test_create_read_special_broken_symlink(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    os.symlink("somewhere does not exist", os.path.join(input_path, "link"))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--read-special", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert "input/link -> somewhere does not exist" in output
+
+
+def test_log_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--log-json", "--list", "--debug")
+    messages = {}  # type -> message, one of each kind
+    for line in log.splitlines():
+        msg = json.loads(line)
+        messages[msg["type"]] = msg
+
+    file_status = messages["file_status"]
+    assert "status" in file_status
+    assert file_status["path"].startswith("input")
+
+    log_message = messages["log_message"]
+    assert isinstance(log_message["time"], float)
+    assert log_message["levelname"] == "DEBUG"  # there should only be DEBUG messages
+    assert isinstance(log_message["message"], str)
+
+
+def test_common_options(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "--debug", "create", "test", "input")
+    assert "security: read previous location" in log
+
+
+def test_hashing_time(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def extract_hashing_time(borg_create_output):
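+        # pull the "Time spent in hashing: X.XX seconds" line out of the --stats output and return the float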
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        hashing_time = [line for line in borg_create_output if line[0] == "Time spent in hashing"].pop()
+        hashing_time = hashing_time[1]
+        hashing_time = float(hashing_time.removesuffix(" seconds"))
+        return hashing_time
+
+    # Test case set up: create a repository and a file
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    create_regular_file(input_path, "testfile", contents=randbytes(50000000))
+    # Archive
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    hashing_time = extract_hashing_time(result)
+
+    assert hashing_time > 0.0
+
+
+def test_chunking_time(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def extract_chunking_time(borg_create_output):
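+        # pull the "Time spent in chunking: X.XX seconds" line out of the --stats output and return the float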
+        borg_create_output = borg_create_output.strip().splitlines()
+        borg_create_output = [line.split(":", 1) for line in borg_create_output]
+        chunking_time = [line for line in borg_create_output if line[0] == "Time spent in chunking"].pop()
+        chunking_time = chunking_time[1]
+        chunking_time = float(chunking_time.removesuffix(" seconds"))
+        return chunking_time
+
+    # Test case set up: create a repository and a file
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "testfile", contents=randbytes(50000000))
+    # Archive
+    result = cmd(archiver, f"--repo={repo_location}", "create", "--stats", "test_archive", input_path)
+    chunking_time = extract_chunking_time(result)
+
+    assert chunking_time > 0.0

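Note on the conversion pattern seen throughout these modules: each file now parametrizes its tests over archiver kinds via a module-level pytest_generate_tests hook, and each test resolves the chosen fixture with request.getfixturevalue(archivers). A minimal sketch of what the generate_archiver_tests helper is assumed to do (the real one lives in src/borg/testsuite/archiver/__init__.py; the fixture names below are illustrative, not taken from this diff):

    def generate_archiver_tests(metafunc, kinds: str):
        # parametrize the "archivers" argument with one fixture name per
        # requested kind; tests resolve it via request.getfixturevalue()
        if "archivers" in metafunc.fixturenames:
            fixture_by_kind = {"local": "archiver", "remote": "remote_archiver", "binary": "binary_archiver"}
            metafunc.parametrize("archivers", [fixture_by_kind[kind] for kind in kinds.split(",")])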
+ 201 - 176
src/borg/testsuite/archiver/debug_cmds.py

@@ -1,183 +1,208 @@
 import json
 import os
 import pstats
-import unittest
 
 from ...constants import *  # NOQA
 from .. import changedir
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
 from ..compress import Compressor
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_debug_profile(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--debug-profile=create.prof")
-        self.cmd("debug", "convert-profile", "create.prof", "create.pyprof")
-        stats = pstats.Stats("create.pyprof")
-        stats.strip_dirs()
-        stats.sort_stats("cumtime")
-
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input", "--debug-profile=create.pyprof")
-        stats = pstats.Stats("create.pyprof")  # Only do this on trusted data!
-        stats.strip_dirs()
-        stats.sort_stats("cumtime")
-
-    def test_debug_dump_archive_items(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "debug", "dump-archive-items", "test")
-        output_dir = sorted(os.listdir("output"))
-        assert len(output_dir) > 0 and output_dir[0].startswith("000000_")
-        assert "Done." in output
-
-    def test_debug_dump_repo_objs(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "debug", "dump-repo-objs")
-        output_dir = sorted(os.listdir("output"))
-        assert len(output_dir) > 0 and output_dir[0].startswith("00000000_")
-        assert "Done." in output
-
-    def test_debug_put_get_delete_obj(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        data = b"some data"
-        self.create_regular_file("file", contents=data)
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "id-hash", "input/file")
-        id_hash = output.strip()
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "put-obj", id_hash, "input/file")
-        assert id_hash in output
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "get-obj", id_hash, "output/file")
-        assert id_hash in output
-        with open("output/file", "rb") as f:
-            data_read = f.read()
-        assert data == data_read
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "delete-obj", id_hash)
-        assert "deleted" in output
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "delete-obj", id_hash)
-        assert "not found" in output
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "delete-obj", "invalid")
-        assert "is invalid" in output
-
-    def test_debug_id_hash_format_put_get_parse_obj(self):
-        """Test format-obj and parse-obj commands"""
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        data = b"some data" * 100
-        meta_dict = {"some": "property"}
-        meta = json.dumps(meta_dict).encode()
-
-        self.create_regular_file("plain.bin", contents=data)
-        self.create_regular_file("meta.json", contents=meta)
-
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "id-hash", "input/plain.bin")
-        id_hash = output.strip()
-
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "debug",
-            "format-obj",
-            id_hash,
-            "input/plain.bin",
-            "input/meta.json",
-            "output/data.bin",
-            "--compression=zstd,2",
-        )
-
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "put-obj", id_hash, "output/data.bin")
-        assert id_hash in output
-
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "get-obj", id_hash, "output/object.bin")
-        assert id_hash in output
-
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "debug",
-            "parse-obj",
-            id_hash,
-            "output/object.bin",
-            "output/plain.bin",
-            "output/meta.json",
-        )
-
-        with open("output/plain.bin", "rb") as f:
-            data_read = f.read()
-        assert data == data_read
-
-        with open("output/meta.json") as f:
-            meta_read = json.load(f)
-        for key, value in meta_dict.items():
-            assert meta_read.get(key) == value
-
-        assert meta_read.get("size") == len(data_read)
-
-        c = Compressor(name="zstd", level=2)
-        _, data_compressed = c.compress(meta_dict, data=data)
-        assert meta_read.get("csize") == len(data_compressed)
-        assert meta_read.get("ctype") == c.compressor.ID
-        assert meta_read.get("clevel") == c.compressor.level
-
-    def test_debug_dump_manifest(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        dump_file = self.output_path + "/dump"
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "dump-manifest", dump_file)
-        assert output == ""
-        with open(dump_file) as f:
-            result = json.load(f)
-        assert "archives" in result
-        assert "config" in result
-        assert "item_keys" in result["config"]
-        assert frozenset(result["config"]["item_keys"]) == ITEM_KEYS
-        assert "timestamp" in result
-        assert "version" in result
-
-    def test_debug_dump_archive(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        dump_file = self.output_path + "/dump"
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "dump-archive", "test", dump_file)
-        assert output == ""
-        with open(dump_file) as f:
-            result = json.load(f)
-        assert "_name" in result
-        assert "_manifest_entry" in result
-        assert "_meta" in result
-        assert "_items" in result
-
-    def test_debug_refcount_obj(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "refcount-obj", "0" * 64).strip()
-        assert (
-            output
-            == "object 0000000000000000000000000000000000000000000000000000000000000000 not found [info from chunks cache]."
-        )
-
-        create_json = json.loads(self.cmd(f"--repo={self.repository_location}", "create", "--json", "test", "input"))
-        archive_id = create_json["archive"]["id"]
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "refcount-obj", archive_id).strip()
-        assert output == "object " + archive_id + " has 1 referrers [info from chunks cache]."
-
-        # Invalid IDs do not abort or return an error
-        output = self.cmd(f"--repo={self.repository_location}", "debug", "refcount-obj", "124", "xyza").strip()
-        assert output == "object id 124 is invalid." + os.linesep + "object id xyza is invalid."
-
-    def test_debug_info(self):
-        output = self.cmd("debug", "info")
-        assert "Python" in output
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import cmd, create_test_files, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_debug_profile(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--debug-profile=create.prof")
+    cmd(archiver, "debug", "convert-profile", "create.prof", "create.pyprof")
+    stats = pstats.Stats("create.pyprof")
+    stats.strip_dirs()
+    stats.sort_stats("cumtime")
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--debug-profile=create.pyprof")
+    stats = pstats.Stats("create.pyprof")  # Only do this on trusted data!
+    stats.strip_dirs()
+    stats.sort_stats("cumtime")
+
+
+def test_debug_dump_archive_items(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-archive-items", "test")
+    output_dir = sorted(os.listdir("output"))
+    assert len(output_dir) > 0 and output_dir[0].startswith("000000_")
+    assert "Done." in output
+
+
+def test_debug_dump_repo_objs(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-repo-objs")
+    output_dir = sorted(os.listdir("output"))
+    assert len(output_dir) > 0 and output_dir[0].startswith("00000000_")
+    assert "Done." in output
+
+
+def test_debug_put_get_delete_obj(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    data = b"some data"
+    create_regular_file(input_path, "file", contents=data)
+
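+    # "debug id-hash" prints the repository object ID (the content hash) for the given file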
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "id-hash", "input/file")
+    id_hash = output.strip()
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "put-obj", id_hash, "input/file")
+    assert id_hash in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "get-obj", id_hash, "output/file")
+    assert id_hash in output
+
+    with open("output/file", "rb") as f:
+        data_read = f.read()
+    assert data == data_read
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", id_hash)
+    assert "deleted" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", id_hash)
+    assert "not found" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", "invalid")
+    assert "is invalid" in output
+
+
+def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
+    """Test format-obj and parse-obj commands"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    data = b"some data" * 100
+    meta_dict = {"some": "property"}
+    meta = json.dumps(meta_dict).encode()
+    create_regular_file(input_path, "plain.bin", contents=data)
+    create_regular_file(input_path, "meta.json", contents=meta)
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "id-hash", "input/plain.bin")
+    id_hash = output.strip()
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "debug",
+        "format-obj",
+        id_hash,
+        "input/plain.bin",
+        "input/meta.json",
+        "output/data.bin",
+        "--compression=zstd,2",
+    )
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "put-obj", id_hash, "output/data.bin")
+    assert id_hash in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "get-obj", id_hash, "output/object.bin")
+    assert id_hash in output
+
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "debug",
+        "parse-obj",
+        id_hash,
+        "output/object.bin",
+        "output/plain.bin",
+        "output/meta.json",
+    )
+    with open("output/plain.bin", "rb") as f:
+        data_read = f.read()
+    assert data == data_read
+
+    with open("output/meta.json") as f:
+        meta_read = json.load(f)
+    for key, value in meta_dict.items():
+        assert meta_read.get(key) == value
+    assert meta_read.get("size") == len(data_read)
+
+    c = Compressor(name="zstd", level=2)
+    _, data_compressed = c.compress(meta_dict, data=data)
+    assert meta_read.get("csize") == len(data_compressed)
+    assert meta_read.get("ctype") == c.compressor.ID
+    assert meta_read.get("clevel") == c.compressor.level
+
+
+def test_debug_dump_manifest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    dump_file = archiver.output_path + "/dump"
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-manifest", dump_file)
+    assert output == ""
+
+    with open(dump_file) as f:
+        result = json.load(f)
+    assert "archives" in result
+    assert "config" in result
+    assert "timestamp" in result
+    assert "version" in result
+    assert "item_keys" in result["config"]
+    assert frozenset(result["config"]["item_keys"]) == ITEM_KEYS
+
+
+def test_debug_dump_archive(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    dump_file = archiver.output_path + "/dump"
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-archive", "test", dump_file)
+    assert output == ""
+
+    with open(dump_file) as f:
+        result = json.load(f)
+    assert "_name" in result
+    assert "_manifest_entry" in result
+    assert "_meta" in result
+    assert "_items" in result
+
+
+def test_debug_refcount_obj(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", "0" * 64).strip()
+    info = "object 0000000000000000000000000000000000000000000000000000000000000000 not found [info from chunks cache]."
+    assert output == info
+
+    create_json = json.loads(cmd(archiver, f"--repo={repo_location}", "create", "--json", "test", "input"))
+    archive_id = create_json["archive"]["id"]
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", archive_id).strip()
+    assert output == f"object {archive_id} has 1 referrers [info from chunks cache]."
+
+    # Invalid IDs do not abort or return an error
+    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", "124", "xyza").strip()
+    assert output == f"object id 124 is invalid.{os.linesep}object id xyza is invalid."
+
+
+def test_debug_info(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    output = cmd(archiver, "debug", "info")
+    assert "Python" in output

+ 77 - 72
src/borg/testsuite/archiver/delete_cmd.py

@@ -1,85 +1,90 @@
-import unittest
-
 from ...archive import Archive
 from ...constants import *  # NOQA
 from ...manifest import Manifest
 from ...repository import Repository
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-from . import src_file
+from . import cmd, create_regular_file, src_file, create_src_archive, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_delete(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test.3", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "another_test.1", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "another_test.2", "input")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.2", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "delete", "--match-archives", "sh:another_*")
+    cmd(archiver, f"--repo={repo_location}", "delete", "--last", "1")
+    cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.2", "--dry-run")
+    output = cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test.2", "--stats")
+    assert "Original size: -" in output  # negative size == deleted data
+    # Make sure all data except the manifest has been deleted
+    with Repository(repo_path) as repository:
+        assert len(repository) == 1
+
 
 
+def test_delete_multiple(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_delete(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test.3", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "another_test.1", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "another_test.2", "input")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.2", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "delete", "--match-archives", "sh:another_*")
-        self.cmd(f"--repo={self.repository_location}", "delete", "--last", "1")
-        self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.2", "--dry-run")
-        output = self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test.2", "--stats")
-        self.assert_in("Original size: -", output)  # negative size == deleted data
-        # Make sure all data except the manifest has been deleted
-        with Repository(self.repository_path) as repository:
-            self.assert_equal(len(repository), 1)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test3", "input")
+    cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test1")
+    cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test2")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test3", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test3")
+    assert not cmd(archiver, f"--repo={repo_location}", "rlist")
 
 
-    def test_delete_multiple(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test3", "input")
-        self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test1")
-        self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test2")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test3", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test3")
-        assert not self.cmd(f"--repo={self.repository_location}", "rlist")
 
 
-    def test_delete_force(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.create_src_archive("test")
-        with Repository(self.repository_path, exclusive=True) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            archive = Archive(manifest, "test")
-            for item in archive.iter_items():
-                if item.path.endswith(src_file):
-                    repository.delete(item.chunks[-1].id)
-                    break
-            else:
-                assert False  # missed the file
-            repository.commit(compact=False)
-        output = self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test", "--force")
-        self.assert_in("deleted archive was corrupted", output)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_not_in("test", output)
+def test_delete_force(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
 
 
-    def test_delete_double_force(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.create_src_archive("test")
-        with Repository(self.repository_path, exclusive=True) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-            archive = Archive(manifest, "test")
-            id = archive.metadata.items[0]
-            repository.put(id, b"corrupted items metadata stream chunk")
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "delete", "-a", "test", "--force", "--force")
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_not_in("test", output)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    create_src_archive(archiver, "test")
+    with Repository(repo_path, exclusive=True) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        archive = Archive(manifest, "test")
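+        # simulate on-disk corruption: delete one chunk of the archived src_file, then commit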
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
+                repository.delete(item.chunks[-1].id)
+                break
+        else:
+            assert False  # missed the file
+        repository.commit(compact=False)
+    output = cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test", "--force")
+    assert "deleted archive was corrupted" in output
 
 
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "test" not in output
 
 
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
 
 
+def test_delete_double_force(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    create_src_archive(archiver, "test")
+    with Repository(repo_path, exclusive=True) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+        archive = Archive(manifest, "test")
+        id = archive.metadata.items[0]
+        repository.put(id, b"corrupted items metadata stream chunk")
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "delete", "-a", "test", "--force", "--force")
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "test" not in output

+ 286 - 310
src/borg/testsuite/archiver/diff_cmd.py

@@ -2,321 +2,297 @@ import json
 import os
 import stat
 import time
-import unittest
 
 from ...constants import *  # NOQA
 from .. import are_symlinks_supported, are_hardlinks_supported
 from ..platform import is_win32, is_darwin
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_basic_functionality(self):
-        # Setup files for the first snapshot
-        self.create_regular_file("empty", size=0)
-        self.create_regular_file("file_unchanged", size=128)
-        self.create_regular_file("file_removed", size=256)
-        self.create_regular_file("file_removed2", size=512)
-        self.create_regular_file("file_replaced", size=1024)
-        os.mkdir("input/dir_replaced_with_file")
-        os.chmod("input/dir_replaced_with_file", stat.S_IFDIR | 0o755)
-        os.mkdir("input/dir_removed")
+from . import cmd, create_regular_file, RK_ENCRYPTION, assert_line_exists, generate_archiver_tests
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_basic_functionality(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    # Setup files for the first snapshot
+    create_regular_file(input_path, "empty", size=0)
+    create_regular_file(input_path, "file_unchanged", size=128)
+    create_regular_file(input_path, "file_removed", size=256)
+    create_regular_file(input_path, "file_removed2", size=512)
+    create_regular_file(input_path, "file_replaced", size=1024)
+    os.mkdir("input/dir_replaced_with_file")
+    os.chmod("input/dir_replaced_with_file", stat.S_IFDIR | 0o755)
+    os.mkdir("input/dir_removed")
+    if are_symlinks_supported():
+        os.mkdir("input/dir_replaced_with_link")
+        os.symlink("input/dir_replaced_with_file", "input/link_changed")
+        os.symlink("input/file_unchanged", "input/link_removed")
+        os.symlink("input/file_removed2", "input/link_target_removed")
+        os.symlink("input/empty", "input/link_target_contents_changed")
+        os.symlink("input/empty", "input/link_replaced_by_file")
+    if are_hardlinks_supported():
+        os.link("input/file_replaced", "input/hardlink_target_replaced")
+        os.link("input/empty", "input/hardlink_contents_changed")
+        os.link("input/file_removed", "input/hardlink_removed")
+        os.link("input/file_removed2", "input/hardlink_target_removed")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # Create the first snapshot
+    cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    # Setup files for the second snapshot
+    create_regular_file(input_path, "file_added", size=2048)
+    create_regular_file(input_path, "file_empty_added", size=0)
+    os.unlink("input/file_replaced")
+    create_regular_file(input_path, "file_replaced", contents=b"0" * 4096)
+    os.unlink("input/file_removed")
+    os.unlink("input/file_removed2")
+    os.rmdir("input/dir_replaced_with_file")
+    create_regular_file(input_path, "dir_replaced_with_file", size=8192)
+    os.chmod("input/dir_replaced_with_file", stat.S_IFREG | 0o755)
+    os.mkdir("input/dir_added")
+    os.rmdir("input/dir_removed")
+    if are_symlinks_supported():
+        os.rmdir("input/dir_replaced_with_link")
+        os.symlink("input/dir_added", "input/dir_replaced_with_link")
+        os.unlink("input/link_changed")
+        os.symlink("input/dir_added", "input/link_changed")
+        os.symlink("input/dir_added", "input/link_added")
+        os.unlink("input/link_replaced_by_file")
+        create_regular_file(input_path, "link_replaced_by_file", size=16384)
+        os.unlink("input/link_removed")
+    if are_hardlinks_supported():
+        os.unlink("input/hardlink_removed")
+        os.link("input/file_added", "input/hardlink_added")
+    with open("input/empty", "ab") as fd:
+        fd.write(b"appended_data")
+    # Create the second snapshot
+    cmd(archiver, f"--repo={repo_location}", "create", "test1a", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test1b", "input", "--chunker-params", "16,18,17,4095")
+
+    def do_asserts(output, can_compare_ids, content_only=False):
+        lines: list = output.splitlines()
+        assert "file_replaced" in output  # added to debug #3494
+        change = "modified.*B" if can_compare_ids else r"modified:  \(can't get size\)"
+        assert_line_exists(lines, f"{change}.*input/file_replaced")
+        # File unchanged
+        assert "input/file_unchanged" not in output
+
+        # Directory replaced with a regular file
+        if "BORG_TESTS_IGNORE_MODES" not in os.environ and not is_win32 and not content_only:
+            assert_line_exists(lines, "[drwxr-xr-x -> -rwxr-xr-x].*input/dir_replaced_with_file")
+
+        # Basic directory cases
+        assert "added directory             input/dir_added" in output
+        assert "removed directory           input/dir_removed" in output
+
+        if are_symlinks_supported():
+            # Basic symlink cases
+            assert_line_exists(lines, "changed link.*input/link_changed")
+            assert_line_exists(lines, "added link.*input/link_added")
+            assert_line_exists(lines, "removed link.*input/link_removed")
+
+            # Symlink replacing or being replaced
+            if not content_only:
+                assert "input/dir_replaced_with_link" in output
+                assert "input/link_replaced_by_file" in output
+
+            # Symlink target removed. Should not affect the symlink at all.
+            assert "input/link_target_removed" not in output
+
+        # The inode has two links and the file contents changed. Borg
+        # should notice the changes in both links. However, the symlink
+        # pointing to the file is not changed.
+        change = "modified.*0 B" if can_compare_ids else r"modified:  \(can't get size\)"
+        assert_line_exists(lines, f"{change}.*input/empty")
+        if are_hardlinks_supported():
+            assert_line_exists(lines, f"{change}.*input/hardlink_contents_changed")
+        if are_symlinks_supported():
+            assert "input/link_target_contents_changed" not in output
+
+        # Added a new file and a hard link to it. Both links to the same
+        # inode should appear as separate files.
+        assert "added:              2.05 kB input/file_added" in output
+        if are_hardlinks_supported():
+            assert "added:              2.05 kB input/hardlink_added" in output
+
+        # check that a diff between a nonexistent and an empty new file is found
+        assert "added:                  0 B input/file_empty_added" in output
+
+        # The inode has two links and both of them are deleted. They should
+        # appear as two deleted files.
+        assert "removed:              256 B input/file_removed" in output
+        if are_hardlinks_supported():
+            assert "removed:              256 B input/hardlink_removed" in output
+
+        if are_hardlinks_supported() and content_only:
+            # Another link (marked previously as the source in borg) to the
+            # same inode was removed. This should only change the ctime since removing
+            # the link merely decrements the inode's hard-link count.
+            assert "input/hardlink_target_removed" not in output
+
+            # Another link (marked previously as the source in borg) to the
+            # same inode was replaced with a new regular file. This should only change
+            # its ctime. This should not be reflected in the output if content-only is set
+            assert "input/hardlink_target_replaced" not in output
+
+    def do_json_asserts(output, can_compare_ids, content_only=False):
+        def get_changes(filename, data):
+            chgsets = [j["changes"] for j in data if j["path"] == filename]
+            assert len(chgsets) < 2
+            # return a flattened list of changes for given filename
+            return sum(chgsets, [])
+
+        # convert output to list of dicts
+        joutput = [json.loads(line) for line in output.split("\n") if line]
+
+        # File contents changed (deleted and replaced with a new file)
+        expected = {"type": "modified", "added": 4096, "removed": 1024} if can_compare_ids else {"type": "modified"}
+        assert expected in get_changes("input/file_replaced", joutput)
+
+        # File unchanged
+        assert not any(get_changes("input/file_unchanged", joutput))
+
+        # Directory replaced with a regular file
+        if "BORG_TESTS_IGNORE_MODES" not in os.environ and not is_win32 and not content_only:
+            assert {"type": "changed mode", "item1": "drwxr-xr-x", "item2": "-rwxr-xr-x"} in get_changes(
+                "input/dir_replaced_with_file", joutput
+            )
+
+        # Basic directory cases
+        assert {"type": "added directory"} in get_changes("input/dir_added", joutput)
+        assert {"type": "removed directory"} in get_changes("input/dir_removed", joutput)
+
         if are_symlinks_supported():
-            os.mkdir("input/dir_replaced_with_link")
-            os.symlink("input/dir_replaced_with_file", "input/link_changed")
-            os.symlink("input/file_unchanged", "input/link_removed")
-            os.symlink("input/file_removed2", "input/link_target_removed")
-            os.symlink("input/empty", "input/link_target_contents_changed")
-            os.symlink("input/empty", "input/link_replaced_by_file")
+            # Basic symlink cases
+            assert {"type": "changed link"} in get_changes("input/link_changed", joutput)
+            assert {"type": "added link"} in get_changes("input/link_added", joutput)
+            assert {"type": "removed link"} in get_changes("input/link_removed", joutput)
+
+            # Symlink replacing or being replaced
+
+            if not content_only:
+                assert any(
+                    chg["type"] == "changed mode" and chg["item1"].startswith("d") and chg["item2"].startswith("l")
+                    for chg in get_changes("input/dir_replaced_with_link", joutput)
+                ), get_changes("input/dir_replaced_with_link", joutput)
+                assert any(
+                    chg["type"] == "changed mode" and chg["item1"].startswith("l") and chg["item2"].startswith("-")
+                    for chg in get_changes("input/link_replaced_by_file", joutput)
+                ), get_changes("input/link_replaced_by_file", joutput)
+
+            # Symlink target removed. Should not affect the symlink at all.
+            assert not any(get_changes("input/link_target_removed", joutput))
+
+        # The inode has two links and the file contents changed. Borg
+        # should notice the changes in both links. However, the symlink
+        # pointing to the file is not changed.
+        expected = {"type": "modified", "added": 13, "removed": 0} if can_compare_ids else {"type": "modified"}
+        assert expected in get_changes("input/empty", joutput)
         if are_hardlinks_supported():
-            os.link("input/file_replaced", "input/hardlink_target_replaced")
-            os.link("input/empty", "input/hardlink_contents_changed")
-            os.link("input/file_removed", "input/hardlink_removed")
-            os.link("input/file_removed2", "input/hardlink_target_removed")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        # Create the first snapshot
-        self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-
-        # Setup files for the second snapshot
-        self.create_regular_file("file_added", size=2048)
-        self.create_regular_file("file_empty_added", size=0)
-        os.unlink("input/file_replaced")
-        self.create_regular_file("file_replaced", contents=b"0" * 4096)
-        os.unlink("input/file_removed")
-        os.unlink("input/file_removed2")
-        os.rmdir("input/dir_replaced_with_file")
-        self.create_regular_file("dir_replaced_with_file", size=8192)
-        os.chmod("input/dir_replaced_with_file", stat.S_IFREG | 0o755)
-        os.mkdir("input/dir_added")
-        os.rmdir("input/dir_removed")
+            assert expected in get_changes("input/hardlink_contents_changed", joutput)
         if are_symlinks_supported():
-            os.rmdir("input/dir_replaced_with_link")
-            os.symlink("input/dir_added", "input/dir_replaced_with_link")
-            os.unlink("input/link_changed")
-            os.symlink("input/dir_added", "input/link_changed")
-            os.symlink("input/dir_added", "input/link_added")
-            os.unlink("input/link_replaced_by_file")
-            self.create_regular_file("link_replaced_by_file", size=16384)
-            os.unlink("input/link_removed")
+            assert not any(get_changes("input/link_target_contents_changed", joutput))
+
+        # Added a new file and a hard link to it. Both links to the same
+        # inode should appear as separate files.
+        assert {"added": 2048, "removed": 0, "type": "added"} in get_changes("input/file_added", joutput)
+        if are_hardlinks_supported():
+            assert {"added": 2048, "removed": 0, "type": "added"} in get_changes("input/hardlink_added", joutput)
+
+        # check if a diff between nonexistent and empty new file is found
+        assert {"added": 0, "removed": 0, "type": "added"} in get_changes("input/file_empty_added", joutput)
+
+        # The inode has two links and both of them are deleted. They should
+        # appear as two deleted files.
+        assert {"added": 0, "removed": 256, "type": "removed"} in get_changes("input/file_removed", joutput)
         if are_hardlinks_supported():
-            os.unlink("input/hardlink_removed")
-            os.link("input/file_added", "input/hardlink_added")
-
-        with open("input/empty", "ab") as fd:
-            fd.write(b"appended_data")
-
-        # Create the second snapshot
-        self.cmd(f"--repo={self.repository_location}", "create", "test1a", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test1b", "input", "--chunker-params", "16,18,17,4095")
-
-        def do_asserts(output, can_compare_ids, content_only=False):
-            lines: list = output.splitlines()
-            assert "file_replaced" in output  # added to debug #3494
-            change = "modified.*B" if can_compare_ids else r"modified:  \(can't get size\)"
-            self.assert_line_exists(lines, f"{change}.*input/file_replaced")
-            # File unchanged
-            assert "input/file_unchanged" not in output
-
-            # Directory replaced with a regular file
-            if "BORG_TESTS_IGNORE_MODES" not in os.environ and not is_win32 and not content_only:
-                self.assert_line_exists(lines, "[drwxr-xr-x -> -rwxr-xr-x].*input/dir_replaced_with_file")
-
-            # Basic directory cases
-            assert "added directory             input/dir_added" in output
-            assert "removed directory           input/dir_removed" in output
-
-            if are_symlinks_supported():
-                # Basic symlink cases
-                self.assert_line_exists(lines, "changed link.*input/link_changed")
-                self.assert_line_exists(lines, "added link.*input/link_added")
-                self.assert_line_exists(lines, "removed link.*input/link_removed")
-
-                # Symlink replacing or being replaced
-                if not content_only:
-                    assert "input/dir_replaced_with_link" in output
-                    assert "input/link_replaced_by_file" in output
-
-                # Symlink target removed. Should not affect the symlink at all.
-                assert "input/link_target_removed" not in output
-
-            # The inode has two links and the file contents changed. Borg
-            # should notice the changes in both links. However, the symlink
-            # pointing to the file is not changed.
-            change = "modified.*0 B" if can_compare_ids else r"modified:  \(can't get size\)"
-            self.assert_line_exists(lines, f"{change}.*input/empty")
-            if are_hardlinks_supported():
-                self.assert_line_exists(lines, f"{change}.*input/hardlink_contents_changed")
-            if are_symlinks_supported():
-                assert "input/link_target_contents_changed" not in output
-
-            # Added a new file and a hard link to it. Both links to the same
-            # inode should appear as separate files.
-            assert "added:              2.05 kB input/file_added" in output
-            if are_hardlinks_supported():
-                assert "added:              2.05 kB input/hardlink_added" in output
-
-            # check if a diff between nonexistent and empty new file is found
-            assert "added:                  0 B input/file_empty_added" in output
-
-            # The inode has two links and both of them are deleted. They should
-            # appear as two deleted files.
-            assert "removed:              256 B input/file_removed" in output
-            if are_hardlinks_supported():
-                assert "removed:              256 B input/hardlink_removed" in output
-
-            if are_hardlinks_supported() and content_only:
-                # Another link (marked previously as the source in borg) to the
-                # same inode was removed. This should only change the ctime since removing
-                # the link would result in the decrementation of the inode's hard-link count.
-                assert "input/hardlink_target_removed" not in output
-
-                # Another link (marked previously as the source in borg) to the
-                # same inode was replaced with a new regular file. This should only change
-                # its ctime. This should not be reflected in the output if content-only is set
-                assert "input/hardlink_target_replaced" not in output
-
-        def do_json_asserts(output, can_compare_ids, content_only=False):
-            def get_changes(filename, data):
-                chgsets = [j["changes"] for j in data if j["path"] == filename]
-                assert len(chgsets) < 2
-                # return a flattened list of changes for given filename
-                return sum(chgsets, [])
-
-            # convert output to list of dicts
-            joutput = [json.loads(line) for line in output.split("\n") if line]
-
-            # File contents changed (deleted and replaced with a new file)
-            expected = {"type": "modified", "added": 4096, "removed": 1024} if can_compare_ids else {"type": "modified"}
-            assert expected in get_changes("input/file_replaced", joutput)
-
-            # File unchanged
-            assert not any(get_changes("input/file_unchanged", joutput))
-
-            # Directory replaced with a regular file
-            if "BORG_TESTS_IGNORE_MODES" not in os.environ and not is_win32 and not content_only:
-                assert {"type": "changed mode", "item1": "drwxr-xr-x", "item2": "-rwxr-xr-x"} in get_changes(
-                    "input/dir_replaced_with_file", joutput
-                )
-
-            # Basic directory cases
-            assert {"type": "added directory"} in get_changes("input/dir_added", joutput)
-            assert {"type": "removed directory"} in get_changes("input/dir_removed", joutput)
-
-            if are_symlinks_supported():
-                # Basic symlink cases
-                assert {"type": "changed link"} in get_changes("input/link_changed", joutput)
-                assert {"type": "added link"} in get_changes("input/link_added", joutput)
-                assert {"type": "removed link"} in get_changes("input/link_removed", joutput)
-
-                # Symlink replacing or being replaced
-
-                if not content_only:
-                    assert any(
-                        chg["type"] == "changed mode" and chg["item1"].startswith("d") and chg["item2"].startswith("l")
-                        for chg in get_changes("input/dir_replaced_with_link", joutput)
-                    ), get_changes("input/dir_replaced_with_link", joutput)
-                    assert any(
-                        chg["type"] == "changed mode" and chg["item1"].startswith("l") and chg["item2"].startswith("-")
-                        for chg in get_changes("input/link_replaced_by_file", joutput)
-                    ), get_changes("input/link_replaced_by_file", joutput)
-
-                # Symlink target removed. Should not affect the symlink at all.
-                assert not any(get_changes("input/link_target_removed", joutput))
-
-            # The inode has two links and the file contents changed. Borg
-            # should notice the changes in both links. However, the symlink
-            # pointing to the file is not changed.
-            expected = {"type": "modified", "added": 13, "removed": 0} if can_compare_ids else {"type": "modified"}
-            assert expected in get_changes("input/empty", joutput)
-            if are_hardlinks_supported():
-                assert expected in get_changes("input/hardlink_contents_changed", joutput)
-            if are_symlinks_supported():
-                assert not any(get_changes("input/link_target_contents_changed", joutput))
-
-            # Added a new file and a hard link to it. Both links to the same
-            # inode should appear as separate files.
-            assert {"added": 2048, "removed": 0, "type": "added"} in get_changes("input/file_added", joutput)
-            if are_hardlinks_supported():
-                assert {"added": 2048, "removed": 0, "type": "added"} in get_changes("input/hardlink_added", joutput)
-
-            # check if a diff between nonexistent and empty new file is found
-            assert {"added": 0, "removed": 0, "type": "added"} in get_changes("input/file_empty_added", joutput)
-
-            # The inode has two links and both of them are deleted. They should
-            # appear as two deleted files.
-            assert {"added": 0, "removed": 256, "type": "removed"} in get_changes("input/file_removed", joutput)
-            if are_hardlinks_supported():
-                assert {"added": 0, "removed": 256, "type": "removed"} in get_changes("input/hardlink_removed", joutput)
-
-            if are_hardlinks_supported() and content_only:
-                # Another link (marked previously as the source in borg) to the
-                # same inode was removed. This should only change the ctime since removing
-                # the link would result in the decrementation of the inode's hard-link count.
-                assert not any(get_changes("input/hardlink_target_removed", joutput))
-
-                # Another link (marked previously as the source in borg) to the
-                # same inode was replaced with a new regular file. This should only change
-                # its ctime. This should not be reflected in the output if content-only is set
-                assert not any(get_changes("input/hardlink_target_replaced", joutput))
-
-        output = self.cmd(f"--repo={self.repository_location}", "diff", "test0", "test1a")
-        do_asserts(output, True)
-        # We expect exit_code=1 due to the chunker params warning
-        output = self.cmd(
-            f"--repo={self.repository_location}", "diff", "test0", "test1b", "--content-only", exit_code=1
-        )
-        do_asserts(output, False, content_only=True)
-
-        output = self.cmd(f"--repo={self.repository_location}", "diff", "test0", "test1a", "--json-lines")
-        do_json_asserts(output, True)
-
-        output = self.cmd(
-            f"--repo={self.repository_location}", "diff", "test0", "test1a", "--json-lines", "--content-only"
-        )
-        do_json_asserts(output, True, content_only=True)
-
-    def test_time_diffs(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("test_file", size=10)
-        self.cmd(f"--repo={self.repository_location}", "create", "archive1", "input")
-        time.sleep(0.1)
-        os.unlink("input/test_file")
-        if is_win32:
-            # Sleeping for 15s because Windows doesn't refresh ctime if file is deleted and recreated within 15 seconds.
-            time.sleep(15)
-        elif is_darwin:
-            time.sleep(1)  # HFS has a 1s timestamp granularity
-        self.create_regular_file("test_file", size=15)
-        self.cmd(f"--repo={self.repository_location}", "create", "archive2", "input")
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "diff",
-            "archive1",
-            "archive2",
-            "--format",
-            "'{mtime}{ctime} {path}{NL}'",
-        )
-        self.assert_in("mtime", output)
-        self.assert_in("ctime", output)  # Should show up on windows as well since it is a new file.
-        if is_darwin:
-            time.sleep(1)  # HFS has a 1s timestamp granularity
-        os.chmod("input/test_file", 0o777)
-        self.cmd(f"--repo={self.repository_location}", "create", "archive3", "input")
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "diff",
-            "archive2",
-            "archive3",
-            "--format",
-            "'{mtime}{ctime} {path}{NL}'",
-        )
-        self.assert_not_in("mtime", output)
-        # Checking platform because ctime should not be shown on windows since it wasn't recreated.
-        if not is_win32:
-            self.assert_in("ctime", output)
-        else:
-            self.assert_not_in("ctime", output)
-
-    def test_sort_option(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        self.create_regular_file("a_file_removed", size=8)
-        self.create_regular_file("f_file_removed", size=16)
-        self.create_regular_file("c_file_changed", size=32)
-        self.create_regular_file("e_file_changed", size=64)
-        self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-
-        os.unlink("input/a_file_removed")
-        os.unlink("input/f_file_removed")
-        os.unlink("input/c_file_changed")
-        os.unlink("input/e_file_changed")
-        self.create_regular_file("c_file_changed", size=512)
-        self.create_regular_file("e_file_changed", size=1024)
-        self.create_regular_file("b_file_added", size=128)
-        self.create_regular_file("d_file_added", size=256)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-
-        output = self.cmd(f"--repo={self.repository_location}", "diff", "test0", "test1", "--sort", "--content-only")
-        expected = [
-            "a_file_removed",
-            "b_file_added",
-            "c_file_changed",
-            "d_file_added",
-            "e_file_changed",
-            "f_file_removed",
-        ]
-        assert isinstance(output, str)
-        outputs = output.splitlines()
-        assert len(outputs) == len(expected)
-        assert all(x in line for x, line in zip(expected, outputs))
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+            assert {"added": 0, "removed": 256, "type": "removed"} in get_changes("input/hardlink_removed", joutput)
+
+        if are_hardlinks_supported() and content_only:
+            # Another link (marked previously as the source in borg) to the
+            # same inode was removed. This should only change the ctime, since removing
+            # the link merely decrements the inode's hard-link count.
+            assert not any(get_changes("input/hardlink_target_removed", joutput))
+
+            # Another link (marked previously as the source in borg) to the
+            # same inode was replaced with a new regular file. This should only change
+            # its ctime, which should not be reflected in the output if content-only is set.
+            assert not any(get_changes("input/hardlink_target_replaced", joutput))
+
+    output = cmd(archiver, f"--repo={repo_location}", "diff", "test0", "test1a")
+    do_asserts(output, True)
+
+    # We expect exit_code=1 due to the chunker params warning
+    output = cmd(archiver, f"--repo={repo_location}", "diff", "test0", "test1b", "--content-only", exit_code=1)
+    do_asserts(output, False, content_only=True)
+
+    output = cmd(archiver, f"--repo={repo_location}", "diff", "test0", "test1a", "--json-lines")
+    do_json_asserts(output, True)
+
+    output = cmd(archiver, f"--repo={repo_location}", "diff", "test0", "test1a", "--json-lines", "--content-only")
+    do_json_asserts(output, True, content_only=True)
+
+
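For reference, each line of the --json-lines output parsed by do_json_asserts() is one JSON object per changed path. The shape implied by the assertions above looks roughly like this (a sketch; the values are illustrative):

    # one record per line of `borg diff ... --json-lines`; keys as exercised
    # by the assertions above, values made up for illustration
    {"path": "input/file_replaced", "changes": [{"type": "modified", "added": 4096, "removed": 1024}]}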
+def test_time_diffs(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "test_file", size=10)
+    cmd(archiver, f"--repo={repo_location}", "create", "archive1", "input")
+    time.sleep(0.1)
+    os.unlink("input/test_file")
+    if is_win32:
+        # Sleeping for 15s because Windows doesn't refresh ctime if file is deleted and recreated within 15 seconds.
+        time.sleep(15)
+    elif is_darwin:
+        time.sleep(1)  # HFS has a 1s timestamp granularity
+    create_regular_file(input_path, "test_file", size=15)
+    cmd(archiver, f"--repo={repo_location}", "create", "archive2", "input")
+    output = cmd(
+        archiver, f"--repo={repo_location}", "diff", "archive1", "archive2", "--format", "'{mtime}{ctime} {path}{NL}'"
+    )
+    assert "mtime" in output
+    assert "ctime" in output  # Should show up on Windows as well since it is a new file.
+
+    if is_darwin:
+        time.sleep(1)  # HFS has a 1s timestamp granularity
+    os.chmod("input/test_file", 0o777)
+    cmd(archiver, f"--repo={repo_location}", "create", "archive3", "input")
+    output = cmd(
+        archiver, f"--repo={repo_location}", "diff", "archive2", "archive3", "--format", "'{mtime}{ctime} {path}{NL}'"
+    )
+    assert "mtime" not in output
+    # Checking platform because ctime should not be shown on Windows since it wasn't recreated.
+    if not is_win32:
+        assert "ctime" in output
+    else:
+        assert "ctime" not in output
+
+
+def test_sort_option(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+
+    create_regular_file(input_path, "a_file_removed", size=8)
+    create_regular_file(input_path, "f_file_removed", size=16)
+    create_regular_file(input_path, "c_file_changed", size=32)
+    create_regular_file(input_path, "e_file_changed", size=64)
+    cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+
+    os.unlink("input/a_file_removed")
+    os.unlink("input/f_file_removed")
+    os.unlink("input/c_file_changed")
+    os.unlink("input/e_file_changed")
+    create_regular_file(input_path, "c_file_changed", size=512)
+    create_regular_file(input_path, "e_file_changed", size=1024)
+    create_regular_file(input_path, "b_file_added", size=128)
+    create_regular_file(input_path, "d_file_added", size=256)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", "input")
+
+    output = cmd(archiver, f"--repo={repo_location}", "diff", "test0", "test1", "--sort", "--content-only")
+    expected = ["a_file_removed", "b_file_added", "c_file_changed", "d_file_added", "e_file_changed", "f_file_removed"]
+    assert isinstance(output, str)
+    outputs = output.splitlines()
+    assert len(outputs) == len(expected)
+    assert all(x in line for x, line in zip(expected, outputs))
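The archivers/request.getfixturevalue(archivers) pattern used by all tests above is driven by a per-module pytest_generate_tests hook that delegates to generate_archiver_tests() (visible in the extract_cmd.py hunk below). A hedged sketch of how such a helper can parametrize tests over fixture names; the real implementation lives in src/borg/testsuite/archiver/__init__.py and may differ:

    # hypothetical sketch: "archiver", "remote_archiver" and "binary_archiver"
    # are assumed to be fixtures defined elsewhere in the test suite
    def generate_archiver_tests(metafunc, kinds: str):
        if "archivers" in metafunc.fixturenames:
            names = {"local": "archiver", "remote": "remote_archiver", "binary": "binary_archiver"}
            # each test receives the fixture *name* and resolves it lazily via
            # request.getfixturevalue(archivers)
            metafunc.parametrize("archivers", [names[k] for k in kinds.split(",")])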

+ 8 - 7
src/borg/testsuite/archiver/disk_full.py

@@ -20,13 +20,14 @@ import shutil
 import pytest
 
 from ...constants import *  # NOQA
-from . import cmd, environment_variable
+from .. import environment_variable
+from . import cmd_fixture
 
 DF_MOUNT = "/tmp/borg-mount"
 
 
 @pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT)
-def test_disk_full(cmd):
+def test_disk_full(cmd_fixture):
     def make_files(dir, count, size, rnd=True):
         shutil.rmtree(dir, ignore_errors=True)
         os.mkdir(dir)
@@ -51,7 +52,7 @@ def test_disk_full(cmd):
             shutil.rmtree(input, ignore_errors=True)
             # keep some space and some inodes in reserve that we can free up later:
             make_files(reserve, 80, 100000, rnd=False)
-            rc, out = cmd(f"--repo={repo}", "rcreate")
+            rc, out = cmd_fixture(f"--repo={repo}", "rcreate")
             if rc != EXIT_SUCCESS:
                 print("rcreate", rc, out)
             assert rc == EXIT_SUCCESS
@@ -67,7 +68,7 @@ def test_disk_full(cmd):
                             break
                         raise
                     try:
-                        rc, out = cmd("--repo=%s" % repo, "create", "test%03d" % i, input)
+                        rc, out = cmd_fixture("--repo=%s" % repo, "create", "test%03d" % i, input)
                         success = rc == EXIT_SUCCESS
                         if not success:
                             print("create", rc, out)
@@ -77,12 +78,12 @@ def test_disk_full(cmd):
                         os.remove(os.path.join(repo, "lock.roster"))
             finally:
                 # now some error happened, likely we are out of disk space.
-                # free some space so we can expect borg to be able to work normally:
+                # free some space such that we can expect borg to be able to work normally:
                 shutil.rmtree(reserve, ignore_errors=True)
-            rc, out = cmd(f"--repo={repo}", "rlist")
+            rc, out = cmd_fixture(f"--repo={repo}", "rlist")
             if rc != EXIT_SUCCESS:
                 print("rlist", rc, out)
-            rc, out = cmd(f"--repo={repo}", "check", "--repair")
+            rc, out = cmd_fixture(f"--repo={repo}", "check", "--repair")
             if rc != EXIT_SUCCESS:
                 print("check", rc, out)
             assert rc == EXIT_SUCCESS
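Note the rename of the cmd fixture to cmd_fixture, presumably to avoid shadowing the plain cmd() helper that the other converted modules import. Unlike cmd(), which asserts the expected exit code itself, this fixture hands back the raw (rc, output) pair so the test can tolerate failures while the filesystem is full. A minimal sketch of such a fixture, shelling out to an installed borg binary (an assumption; the real fixture likely runs borg in-process):

    import subprocess

    import pytest

    @pytest.fixture()
    def cmd_fixture():
        def run(*args):
            # return (exit_code, combined output) without asserting success
            proc = subprocess.run(["borg", *args], capture_output=True, text=True)
            return proc.returncode, proc.stdout + proc.stderr
        return run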

+ 742 - 674
src/borg/testsuite/archiver/extract_cmd.py

@@ -2,7 +2,6 @@ import errno
 import os
 import shutil
 import time
-import unittest
 from unittest.mock import patch
 
 import pytest
@@ -16,685 +15,754 @@ from .. import changedir, same_ts_ns
 from .. import are_symlinks_supported, are_hardlinks_supported, is_utime_fully_supported, is_birthtime_fully_supported
 from ..platform import is_darwin, is_win32
 from . import (
-    ArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RemoteArchiverTestCaseBase,
     RK_ENCRYPTION,
     requires_hardlinks,
-    BORG_EXES,
+    cmd,
+    create_test_files,
+    create_regular_file,
+    assert_dirs_equal,
+    _extract_hardlinks_setup,
+    assert_creates_file,
+    generate_archiver_tests,
 )
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    @pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
-    def test_symlink_extract(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert os.readlink("input/link1") == "somewhere"
-
-    @pytest.mark.skipif(
-        not are_symlinks_supported() or not are_hardlinks_supported() or is_darwin,
-        reason="symlinks or hardlinks or hardlinked symlinks not supported",
-    )
-    def test_hardlinked_symlinks_extract(self):
-        self.create_regular_file("target", size=1024)
-        with changedir("input"):
-            os.symlink("target", "symlink1")
-            os.link("symlink1", "symlink2", follow_symlinks=False)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            print(output)
-            with changedir("input"):
-                assert os.path.exists("target")
-                assert os.readlink("symlink1") == "target"
-                assert os.readlink("symlink2") == "target"
-                st1 = os.stat("symlink1", follow_symlinks=False)
-                st2 = os.stat("symlink2", follow_symlinks=False)
-                assert st1.st_nlink == 2
-                assert st2.st_nlink == 2
-                assert st1.st_ino == st2.st_ino
-                assert st1.st_size == st2.st_size
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    def test_directory_timestamps1(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        # default file archiving order (internal recursion)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        # extracting a file inside a directory touches the directory mtime
-        assert os.path.exists("output/input/dir2/file2")
-        # make sure borg fixes the directory mtime after touching it
-        sti = os.stat("input/dir2")
-        sto = os.stat("output/input/dir2")
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    def test_directory_timestamps2(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        # given order, dir first, file second
-        flist_dir_first = b"input/dir2\ninput/dir2/file2\n"
-        self.cmd(f"--repo={self.repository_location}", "create", "--paths-from-stdin", "test", input=flist_dir_first)
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        # extracting a file inside a directory touches the directory mtime
-        assert os.path.exists("output/input/dir2/file2")
-        # make sure borg fixes the directory mtime after touching it
-        sti = os.stat("input/dir2")
-        sto = os.stat("output/input/dir2")
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    def test_directory_timestamps3(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-
-        # given order, file first, dir second
-        flist_file_first = b"input/dir2/file2\ninput/dir2\n"
-        self.cmd(f"--repo={self.repository_location}", "create", "--paths-from-stdin", "test", input=flist_file_first)
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        # extracting a file inside a directory touches the directory mtime
-        assert os.path.exists("output/input/dir2/file2")
-        # make sure borg fixes the directory mtime after touching it
-        sti = os.stat("input/dir2")
-        sto = os.stat("output/input/dir2")
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    def test_atime(self):
-        def has_noatime(some_file):
-            atime_before = os.stat(some_file).st_atime_ns
-            try:
-                with open(os.open(some_file, flags_noatime)) as file:
-                    file.read()
-            except PermissionError:
-                return False
-            else:
-                atime_after = os.stat(some_file).st_atime_ns
-                noatime_used = flags_noatime != flags_normal
-                return noatime_used and atime_before == atime_after
-
-        self.create_test_files()
-        atime, mtime = 123456780, 234567890
-        have_noatime = has_noatime("input/file1")
-        os.utime("input/file1", (atime, mtime))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--atime", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        sti = os.stat("input/file1")
-        sto = os.stat("output/input/file1")
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-        assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
-        if have_noatime:
-            assert same_ts_ns(sti.st_atime_ns, sto.st_atime_ns)
-            assert same_ts_ns(sto.st_atime_ns, atime * 1e9)
-        else:
-            # it touched the input file's atime while backing it up
-            assert same_ts_ns(sto.st_atime_ns, atime * 1e9)
-
-    @pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly setup and execute test without utime")
-    @pytest.mark.skipif(
-        not is_birthtime_fully_supported(), reason="cannot properly setup and execute test without birthtime"
-    )
-    def test_birthtime(self):
-        self.create_test_files()
-        birthtime, mtime, atime = 946598400, 946684800, 946771200
-        os.utime("input/file1", (atime, birthtime))
-        os.utime("input/file1", (atime, mtime))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        sti = os.stat("input/file1")
-        sto = os.stat("output/input/file1")
-        assert same_ts_ns(sti.st_birthtime * 1e9, sto.st_birthtime * 1e9)
-        assert same_ts_ns(sto.st_birthtime * 1e9, birthtime * 1e9)
-        assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
-        assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
-
-    #
-    @pytest.mark.skipif(is_win32, reason="frequent test failures on github CI on win32")
-    def test_sparse_file(self):
-        def is_sparse(fn, total_size, hole_size):
-            st = os.stat(fn)
-            assert st.st_size == total_size
-            sparse = True
-            if sparse and hasattr(st, "st_blocks") and st.st_blocks * 512 >= st.st_size:
-                sparse = False
-            if sparse and has_seek_hole:
-                with open(fn, "rb") as fd:
-                    # only check if the first hole is as expected, because the 2nd hole check
-                    # is problematic on xfs due to its "dynamic speculative EOF preallocation
-                    try:
-                        if fd.seek(0, os.SEEK_HOLE) != 0:
-                            sparse = False
-                        if fd.seek(0, os.SEEK_DATA) != hole_size:
-                            sparse = False
-                    except OSError:
-                        # OS/FS does not really support SEEK_HOLE/SEEK_DATA
-                        sparse = False
-            return sparse
-
-        filename = os.path.join(self.input_path, "sparse")
-        content = b"foobar"
-        hole_size = 5 * (1 << CHUNK_MAX_EXP)  # 5 full chunker buffers
-        total_size = hole_size + len(content) + hole_size
-        with open(filename, "wb") as fd:
-            # create a file that has a hole at the beginning and end (if the
-            # OS and filesystem supports sparse files)
-            fd.seek(hole_size, 1)
-            fd.write(content)
-            fd.seek(hole_size, 1)
-            pos = fd.tell()
-            fd.truncate(pos)
-        # we first check if we could create a sparse input file:
-        sparse_support = is_sparse(filename, total_size, hole_size)
-        if sparse_support:
-            # we could create a sparse input file, so creating a backup of it and
-            # extracting it again (as sparse) should also work:
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-            self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-            with changedir(self.output_path):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", "--sparse")
-            self.assert_dirs_equal("input", "output/input")
-            filename = os.path.join(self.output_path, "input", "sparse")
-            with open(filename, "rb") as fd:
-                # check if file contents are as expected
-                self.assert_equal(fd.read(hole_size), b"\0" * hole_size)
-                self.assert_equal(fd.read(len(content)), content)
-                self.assert_equal(fd.read(hole_size), b"\0" * hole_size)
-            assert is_sparse(filename, total_size, hole_size)
-
-    def test_unusual_filenames(self):
-        filenames = ["normal", "with some blanks", "(with_parens)"]
-        for filename in filenames:
-            filename = os.path.join(self.input_path, filename)
-            with open(filename, "wb"):
-                pass
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        for filename in filenames:
-            with changedir("output"):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", os.path.join("input", filename))
-            assert os.path.exists(os.path.join("output", "input", filename))
-
-    def test_strip_components(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("dir/file")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--strip-components", "3")
-            assert not os.path.exists("file")
-            with self.assert_creates_file("file"):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", "--strip-components", "2")
-            with self.assert_creates_file("dir/file"):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", "--strip-components", "1")
-            with self.assert_creates_file("input/dir/file"):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", "--strip-components", "0")
-
-    @requires_hardlinks
-    def test_extract_hardlinks1(self):
-        self._extract_hardlinks_setup()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert os.stat("input/source").st_nlink == 4
-            assert os.stat("input/abba").st_nlink == 4
-            assert os.stat("input/dir1/hardlink").st_nlink == 4
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 4
-            assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
-
-    @requires_hardlinks
-    def test_extract_hardlinks2(self):
-        self._extract_hardlinks_setup()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--strip-components", "2")
-            assert os.stat("hardlink").st_nlink == 2
-            assert os.stat("subdir/hardlink").st_nlink == 2
-            assert open("subdir/hardlink", "rb").read() == b"123456"
-            assert os.stat("aaaa").st_nlink == 2
-            assert os.stat("source2").st_nlink == 2
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "input/dir1")
-            assert os.stat("input/dir1/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
-            assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
-            assert os.stat("input/dir1/aaaa").st_nlink == 2
-            assert os.stat("input/dir1/source2").st_nlink == 2
-
-    @requires_hardlinks
-    def test_extract_hardlinks_twice(self):
-        # setup for #5603
-        path_a = os.path.join(self.input_path, "a")
-        path_b = os.path.join(self.input_path, "b")
-        os.mkdir(path_a)
-        os.mkdir(path_b)
-        hl_a = os.path.join(path_a, "hardlink")
-        hl_b = os.path.join(path_b, "hardlink")
-        self.create_regular_file(hl_a, contents=b"123456")
-        os.link(hl_a, hl_b)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "input")  # give input twice!
-        # now test extraction
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            # if issue #5603 happens, extraction gives rc == 1 (triggering AssertionError) and warnings like:
-            # input/a/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/a/hardlink'
-            # input/b/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/b/hardlink'
-            # otherwise, when fixed, the hardlinks should be there and have a link count of 2
-            assert os.stat("input/a/hardlink").st_nlink == 2
-            assert os.stat("input/b/hardlink").st_nlink == 2
-
-    def test_extract_include_exclude(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude=input/file4", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "input/file1")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1"])
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--exclude=input/file2")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file3"])
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "--exclude-from=" + self.exclude_file_path
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file3"])
-
-    def test_extract_include_exclude_regex(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-        self.create_regular_file("file333", size=1024 * 80)
-
-        # Create with regular expression exclusion for file4
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude=re:input/file4$", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file3", "file333"])
-        shutil.rmtree("output/input")
-
-        # Extract with regular expression exclusion
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "--exclude=re:file3+")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"])
-        shutil.rmtree("output/input")
-
-        # Combine --exclude with fnmatch and regular expression
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}",
-                "extract",
-                "test",
-                "--exclude=input/file2",
-                "--exclude=re:file[01]",
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file3", "file333"])
-        shutil.rmtree("output/input")
-
-        # Combine --exclude-from and regular expression exclusion
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}",
-                "extract",
-                "test",
-                "--exclude-from=" + self.exclude_file_path,
-                "--exclude=re:file1",
-                "--exclude=re:file(\\d)\\1\\1$",
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file3"])
-
-    def test_extract_include_exclude_regex_from_file(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-        self.create_regular_file("file333", size=1024 * 80)
-
-        # Create while excluding using mixed pattern styles
-        with open(self.exclude_file_path, "wb") as fd:
-            fd.write(b"re:input/file4$\n")
-            fd.write(b"fm:*file3*\n")
-
-        self.cmd(
-            f"--repo={self.repository_location}", "create", "--exclude-from=" + self.exclude_file_path, "test", "input"
-        )
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"])
-        shutil.rmtree("output/input")
-
-        # Exclude using regular expression
-        with open(self.exclude_file_path, "wb") as fd:
-            fd.write(b"re:file3+\n")
-
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "--exclude-from=" + self.exclude_file_path
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"])
-        shutil.rmtree("output/input")
-
-        # Mixed exclude pattern styles
-        with open(self.exclude_file_path, "wb") as fd:
-            fd.write(b"re:file(\\d)\\1\\1$\n")
-            fd.write(b"fm:nothingwillmatchthis\n")
-            fd.write(b"*/file1\n")
-            fd.write(b"re:file2$\n")
-
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "--exclude-from=" + self.exclude_file_path
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), [])
-
-    def test_extract_with_pattern(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file3", size=1024 * 80)
-        self.create_regular_file("file4", size=1024 * 80)
-        self.create_regular_file("file333", size=1024 * 80)
-
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-        # Extract everything with regular expression
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", "re:.*")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file3", "file333", "file4"])
-        shutil.rmtree("output/input")
 
 
-        # Extract with pattern while also excluding files
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "--exclude=re:file[34]$", "test", r"re:file\d$")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"])
-        shutil.rmtree("output/input")
-
-        # Combine --exclude with pattern for extraction
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "--exclude=input/file1", "test", "re:file[12]$")
-        self.assert_equal(sorted(os.listdir("output/input")), ["file2"])
-        shutil.rmtree("output/input")
-
-        # Multiple pattern
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}", "extract", "test", "fm:input/file1", "fm:*file33*", "input/file2"
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file333"])
-
-    def test_extract_list_output(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file", size=1024 * 80)
-
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_not_in("input/file", output)
-        shutil.rmtree("output/input")
-
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--info")
-        self.assert_not_in("input/file", output)
-        shutil.rmtree("output/input")
-
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--list")
-        self.assert_in("input/file", output)
-        shutil.rmtree("output/input")
+@pytest.mark.skipif(not are_symlinks_supported(), reason="symlinks not supported")
+def test_symlink_extract(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
 
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--list", "--info")
-        self.assert_in("input/file", output)
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        assert os.readlink("input/link1") == "somewhere"
 
 
-    def test_extract_progress(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
 
 
-        with changedir("output"):
-            output = self.cmd(f"--repo={self.repository_location}", "extract", "test", "--progress")
-            assert "Extracting:" in output
-
-    def test_extract_pattern_opt(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("file2", size=1024 * 80)
-        self.create_regular_file("file_important", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(
-                f"--repo={self.repository_location}",
-                "extract",
-                "test",
-                "--pattern=+input/file_important",
-                "--pattern=-input/file*",
-            )
-        self.assert_equal(sorted(os.listdir("output/input")), ["file_important"])
-
-    @pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason="Linux capabilities test, requires fakeroot >= 1.20.2")
-    def test_extract_capabilities(self):
-        fchown = os.fchown
-
-        # We need to patch chown manually to get the behaviour Linux has, since fakeroot does not
-        # accurately model the interaction of chown(2) and Linux capabilities, i.e. it does not remove them.
-        def patched_fchown(fd, uid, gid):
-            xattr.setxattr(fd, b"security.capability", b"", follow_symlinks=False)
-            fchown(fd, uid, gid)
-
-        # The capability descriptor used here is valid and taken from a /usr/bin/ping
-        capabilities = b"\x01\x00\x00\x02\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
-        self.create_regular_file("file")
-        xattr.setxattr(b"input/file", b"security.capability", capabilities)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            with patch.object(os, "fchown", patched_fchown):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert xattr.getxattr(b"input/file", b"security.capability") == capabilities
-
-    @pytest.mark.skipif(
-        not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot"
-    )
-    def test_extract_xattrs_errors(self):
-        def patched_setxattr_E2BIG(*args, **kwargs):
-            raise OSError(errno.E2BIG, "E2BIG")
-
-        def patched_setxattr_ENOTSUP(*args, **kwargs):
-            raise OSError(errno.ENOTSUP, "ENOTSUP")
-
-        def patched_setxattr_EACCES(*args, **kwargs):
-            raise OSError(errno.EACCES, "EACCES")
-
-        self.create_regular_file("file")
-        xattr.setxattr(b"input/file", b"user.attribute", b"value")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "-e" "none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            input_abspath = os.path.abspath("input/file")
-            with patch.object(xattr, "setxattr", patched_setxattr_E2BIG):
-                out = self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_WARNING)
-                assert "too big for this filesystem" in out
-                assert "when setting extended attribute user.attribute" in out
-            os.remove(input_abspath)
-            with patch.object(xattr, "setxattr", patched_setxattr_ENOTSUP):
-                out = self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_WARNING)
-                assert "ENOTSUP" in out
-                assert "when setting extended attribute user.attribute" in out
-            os.remove(input_abspath)
-            with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
-                out = self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_WARNING)
-                assert "EACCES" in out
-                assert "when setting extended attribute user.attribute" in out
-            assert os.path.isfile(input_abspath)
-
-    @pytest.mark.skipif(not is_darwin, reason="only for macOS")
-    def test_extract_xattrs_resourcefork(self):
-        self.create_regular_file("file")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "-e" "none")
-        input_path = os.path.abspath("input/file")
-        xa_key, xa_value = b"com.apple.ResourceFork", b"whatshouldbehere"  # issue #7234
-        xattr.setxattr(input_path.encode(), xa_key, xa_value)
-        birthtime_expected = os.stat(input_path).st_birthtime
-        mtime_expected = os.stat(input_path).st_mtime_ns
-        # atime_expected = os.stat(input_path).st_atime_ns
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            extracted_path = os.path.abspath("input/file")
-            birthtime_extracted = os.stat(extracted_path).st_birthtime
-            mtime_extracted = os.stat(extracted_path).st_mtime_ns
-            # atime_extracted = os.stat(extracted_path).st_atime_ns
-            xa_value_extracted = xattr.getxattr(extracted_path.encode(), xa_key)
-        assert xa_value_extracted == xa_value
-        # cope with small birthtime deviations of less than 1000ns:
-        assert -1000 <= (birthtime_extracted - birthtime_expected) * 1e9 <= 1000
-        assert mtime_extracted == mtime_expected
-        # assert atime_extracted == atime_expected  # still broken, but not really important.
-
-    def test_overwrite(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        # Overwriting regular files and directories should be supported
-        os.mkdir("output/input")
-        os.mkdir("output/input/file1")
-        os.mkdir("output/input/dir2")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-        self.assert_dirs_equal("input", "output/input")
-        # But non-empty dirs should fail
-        os.unlink("output/input/file1")
-        os.mkdir("output/input/file1")
-        os.mkdir("output/input/file1/dir")
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=1)
-
-    # derived from test_extract_xattrs_errors()
-    @pytest.mark.skipif(
-        not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot"
-    )
-    def test_do_not_fail_when_percent_is_in_xattr_name(self):
-        """https://github.com/borgbackup/borg/issues/6063"""
-
-        def patched_setxattr_EACCES(*args, **kwargs):
-            raise OSError(errno.EACCES, "EACCES")
-
-        self.create_regular_file("file")
-        xattr.setxattr(b"input/file", b"user.attribute%p", b"value")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "-e" "none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_WARNING)
-
-    # derived from test_extract_xattrs_errors()
-    @pytest.mark.skipif(
-        not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot"
-    )
-    def test_do_not_fail_when_percent_is_in_file_name(self):
-        """https://github.com/borgbackup/borg/issues/6063"""
-
-        def patched_setxattr_EACCES(*args, **kwargs):
-            raise OSError(errno.EACCES, "EACCES")
-
-        os.makedirs(os.path.join(self.input_path, "dir%p"))
-        xattr.setxattr(b"input/dir%p", b"user.attribute", b"value")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "-e" "none")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        with changedir("output"):
-            with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
-                self.cmd(f"--repo={self.repository_location}", "extract", "test", exit_code=EXIT_WARNING)
-
-    def test_extract_continue(self):
-        CONTENTS1, CONTENTS2, CONTENTS3 = b"contents1" * 100, b"contents2" * 200, b"contents3" * 300
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", contents=CONTENTS1)
-        self.create_regular_file("file2", contents=CONTENTS2)
-        self.create_regular_file("file3", contents=CONTENTS3)
-        self.cmd(f"--repo={self.repository_location}", "create", "arch", "input")
-        with changedir("output"):
-            # we simulate an interrupted/partial extraction:
-            self.cmd(f"--repo={self.repository_location}", "extract", "arch")
-            # do not modify file1, it stands for a successfully extracted file
-            file1_st = os.stat("input/file1")
-            # simulate a partially extracted file2 (smaller size, archived mtime not yet set)
-            file2_st = os.stat("input/file2")
-            # make a hardlink, so it does not free the inode when unlinking input/file2
-            os.link("input/file2", "hardlink-to-keep-inode-f2")
-            os.truncate("input/file2", 123)  # -> incorrect size, incorrect mtime
-            # simulate file3 has not yet been extracted
-            file3_st = os.stat("input/file3")
-            # make a hardlink, so it does not free the inode when unlinking input/file3
-            os.link("input/file3", "hardlink-to-keep-inode-f3")
-            os.remove("input/file3")
-        time.sleep(1)  # needed due to timestamp granularity of apple hfs+
-        with changedir("output"):
-            # now try to continue extracting, using the same archive, same output dir:
-            self.cmd(f"--repo={self.repository_location}", "extract", "arch", "--continue")
-            now_file1_st = os.stat("input/file1")
-            assert file1_st.st_ino == now_file1_st.st_ino  # file1 was NOT extracted again
-            assert file1_st.st_mtime_ns == now_file1_st.st_mtime_ns  # has correct mtime
-            new_file2_st = os.stat("input/file2")
-            assert file2_st.st_ino != new_file2_st.st_ino  # file2 was extracted again
-            assert file2_st.st_mtime_ns == new_file2_st.st_mtime_ns  # has correct mtime
-            new_file3_st = os.stat("input/file3")
-            assert file3_st.st_ino != new_file3_st.st_ino  # file3 was extracted again
-            assert file3_st.st_mtime_ns == new_file3_st.st_mtime_ns  # has correct mtime
-            # windows has a strange ctime behaviour when deleting and recreating a file
-            if not is_win32:
-                assert file1_st.st_ctime_ns == now_file1_st.st_ctime_ns  # file not extracted again
-                assert file2_st.st_ctime_ns != new_file2_st.st_ctime_ns  # file extracted again
-                assert file3_st.st_ctime_ns != new_file3_st.st_ctime_ns  # file extracted again
-            # check if all contents (and thus also file sizes) are correct:
-            with open("input/file1", "rb") as f:
-                assert f.read() == CONTENTS1
-            with open("input/file2", "rb") as f:
-                assert f.read() == CONTENTS2
-            with open("input/file3", "rb") as f:
-                assert f.read() == CONTENTS3
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    @unittest.skip("patches objects")
-    def test_extract_capabilities(self):
-        pass
-
-    @unittest.skip("patches objects")
-    def test_extract_xattrs_errors(self):
-        pass
-
-    @unittest.skip("test_overwrite seems incompatible with fakeroot and/or the binary.")
-    def test_overwrite(self):
-        pass
-
-    @unittest.skip("patches objects")
-    def test_do_not_fail_when_percent_is_in_xattr_name(self):
-        pass
-
-    @unittest.skip("patches objects")
-    def test_do_not_fail_when_percent_is_in_file_name(self):
-        pass
+@pytest.mark.skipif(
+    not are_symlinks_supported() or not are_hardlinks_supported() or is_darwin,
+    reason="symlinks or hardlinks or hardlinked symlinks not supported",
+)
+def test_hardlinked_symlinks_extract(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_regular_file(input_path, "target", size=1024)
+    with changedir("input"):
+        os.symlink("target", "symlink1")
+        os.link("symlink1", "symlink2", follow_symlinks=False)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        print(output)
+        with changedir("input"):
+            assert os.path.exists("target")
+            assert os.readlink("symlink1") == "target"
+            assert os.readlink("symlink2") == "target"
+            st1 = os.stat("symlink1", follow_symlinks=False)
+            st2 = os.stat("symlink2", follow_symlinks=False)
+            assert st1.st_nlink == 2
+            assert st2.st_nlink == 2
+            assert st1.st_ino == st2.st_ino
+            assert st1.st_size == st2.st_size
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly set up and execute test without utime")
+def test_directory_timestamps1(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # default file archiving order (internal recursion)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    # extracting a file inside a directory touches the directory mtime
+    assert os.path.exists("output/input/dir2/file2")
+    # make sure borg fixes the directory mtime after touching it
+    sti = os.stat("input/dir2")
+    sto = os.stat("output/input/dir2")
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly set up and execute test without utime")
+def test_directory_timestamps2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # given order, dir first, file second
+    flist_dir_first = b"input/dir2\ninput/dir2/file2\n"
+    cmd(archiver, f"--repo={repo_location}", "create", "--paths-from-stdin", "test", input=flist_dir_first)
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    # extracting a file inside a directory touches the directory mtime
+    assert os.path.exists("output/input/dir2/file2")
+    # make sure borg fixes the directory mtime after touching it
+    sti = os.stat("input/dir2")
+    sto = os.stat("output/input/dir2")
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly set up and execute test without utime")
+def test_directory_timestamps3(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # given order, file first, dir second
+    flist_file_first = b"input/dir2/file2\ninput/dir2\n"
+    cmd(archiver, f"--repo={repo_location}", "create", "--paths-from-stdin", "test", input=flist_file_first)
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    # extracting a file inside a directory touches the directory mtime
+    assert os.path.exists("output/input/dir2/file2")
+    # make sure borg fixes the directory mtime after touching it
+    sti = os.stat("input/dir2")
+    sto = os.stat("output/input/dir2")
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot properly set up and execute test without utime")
+def test_atime(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def has_noatime(some_file):
+        atime_before = os.stat(some_file).st_atime_ns
+        try:
+            with open(os.open(some_file, flags_noatime)) as file:
+                file.read()
+        except PermissionError:
+            return False
+        else:
+            atime_after = os.stat(some_file).st_atime_ns
+            noatime_used = flags_noatime != flags_normal
+            return noatime_used and atime_before == atime_after
+
+    create_test_files(input_path)
+    atime, mtime = 123456780, 234567890
+    have_noatime = has_noatime("input/file1")
+    os.utime("input/file1", (atime, mtime))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--atime", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    sti = os.stat("input/file1")
+    sto = os.stat("output/input/file1")
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+    assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
+    if have_noatime:
+        assert same_ts_ns(sti.st_atime_ns, sto.st_atime_ns)
+        assert same_ts_ns(sto.st_atime_ns, atime * 1e9)
+    else:
+        # it touched the input file's atime while backing it up
+        assert same_ts_ns(sto.st_atime_ns, atime * 1e9)
+
+
+@pytest.mark.skipif(not is_utime_fully_supported(), reason="cannot set up and execute test without utime")
+@pytest.mark.skipif(not is_birthtime_fully_supported(), reason="cannot set up and execute test without birthtime")
+def test_birthtime(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_test_files(input_path)
+    birthtime, mtime, atime = 946598400, 946684800, 946771200
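+    # double utime trick: on birthtime-aware filesystems, setting an mtime earlier than the
+    # current birthtime also moves birthtime back, so the first call plants the birthtime and
+    # the second call sets the final mtime without disturbing it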
+    os.utime("input/file1", (atime, birthtime))
+    os.utime("input/file1", (atime, mtime))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    sti = os.stat("input/file1")
+    sto = os.stat("output/input/file1")
+    assert same_ts_ns(sti.st_birthtime * 1e9, sto.st_birthtime * 1e9)
+    assert same_ts_ns(sto.st_birthtime * 1e9, birthtime * 1e9)
+    assert same_ts_ns(sti.st_mtime_ns, sto.st_mtime_ns)
+    assert same_ts_ns(sto.st_mtime_ns, mtime * 1e9)
+
+
+@pytest.mark.skipif(is_win32, reason="frequent test failures on github CI on win32")
+def test_sparse_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    def is_sparse(fn, total_size, hole_size):
+        st = os.stat(fn)
+        assert st.st_size == total_size
+        sparse = True
+        if sparse and hasattr(st, "st_blocks") and st.st_blocks * 512 >= st.st_size:
+            sparse = False
+        if sparse and has_seek_hole:
+            with open(fn, "rb") as fd:
+                # only check if the first hole is as expected, because the 2nd hole check
+                # is problematic on xfs due to its "dynamic speculative EOF pre-allocation"
+                try:
+                    if fd.seek(0, os.SEEK_HOLE) != 0:
+                        sparse = False
+                    if fd.seek(0, os.SEEK_DATA) != hole_size:
+                        sparse = False
+                except OSError:
+                    # OS/FS does not really support SEEK_HOLE/SEEK_DATA
+                    sparse = False
+        return sparse
+
+    filename = os.path.join(archiver.input_path, "sparse")
+    content = b"foobar"
+    hole_size = 5 * (1 << CHUNK_MAX_EXP)  # 5 full chunker buffers
+    total_size = hole_size + len(content) + hole_size
+    with open(filename, "wb") as fd:
+        # create a file that has a hole at the beginning and end (if the
+        # OS and filesystem support sparse files)
+        fd.seek(hole_size, 1)
+        fd.write(content)
+        fd.seek(hole_size, 1)
+        pos = fd.tell()
+        fd.truncate(pos)
+    # we first check if we could create a sparse input file:
+    sparse_support = is_sparse(filename, total_size, hole_size)
+    if sparse_support:
+        # we could create a sparse input file, so creating a backup of it and
+        # extracting it again (as sparse) should also work:
+        cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+        cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+        with changedir(archiver.output_path):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", "--sparse")
+        assert_dirs_equal("input", "output/input")
+        filename = os.path.join(archiver.output_path, "input", "sparse")
+        with open(filename, "rb") as fd:
+            # check if file contents are as expected
+            assert fd.read(hole_size) == b"\0" * hole_size
+            assert fd.read(len(content)) == content
+            assert fd.read(hole_size) == b"\0" * hole_size
+        assert is_sparse(filename, total_size, hole_size)
+
+
+def test_unusual_filenames(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    filenames = ["normal", "with some blanks", "(with_parens)"]
+
+    for filename in filenames:
+        filename = os.path.join(input_path, filename)
+        with open(filename, "wb"):
+            pass
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    for filename in filenames:
+        with changedir("output"):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", os.path.join("input", filename))
+        assert os.path.exists(os.path.join("output", "input", filename))
+
+
+def test_strip_components(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "dir/file")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--strip-components", "3")
+        assert not os.path.exists("file")
+        with assert_creates_file("file"):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", "--strip-components", "2")
+        with assert_creates_file("dir/file"):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", "--strip-components", "1")
+        with assert_creates_file("input/dir/file"):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", "--strip-components", "0")
+
+
+@requires_hardlinks
+def test_extract_hardlinks1(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _extract_hardlinks_setup(archiver)
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        assert os.stat("input/source").st_nlink == 4
+        assert os.stat("input/abba").st_nlink == 4
+        assert os.stat("input/dir1/hardlink").st_nlink == 4
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 4
+        assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
+
+
+@requires_hardlinks
+def test_extract_hardlinks2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _extract_hardlinks_setup(archiver)
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--strip-components", "2")
+        assert os.stat("hardlink").st_nlink == 2
+        assert os.stat("subdir/hardlink").st_nlink == 2
+        assert open("subdir/hardlink", "rb").read() == b"123456"
+        assert os.stat("aaaa").st_nlink == 2
+        assert os.stat("source2").st_nlink == 2
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "input/dir1")
+        assert os.stat("input/dir1/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
+        assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
+        assert os.stat("input/dir1/aaaa").st_nlink == 2
+        assert os.stat("input/dir1/source2").st_nlink == 2
+
+
+@requires_hardlinks
+def test_extract_hardlinks_twice(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    # setup for #5603
+    path_a = os.path.join(archiver.input_path, "a")
+    path_b = os.path.join(archiver.input_path, "b")
+    os.mkdir(path_a)
+    os.mkdir(path_b)
+    hl_a = os.path.join(path_a, "hardlink")
+    hl_b = os.path.join(path_b, "hardlink")
+    create_regular_file(input_path, hl_a, contents=b"123456")
+    os.link(hl_a, hl_b)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "input")  # give input twice!
+    # now test extraction
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        # if issue #5603 happens, extraction gives rc == 1 (triggering AssertionError) and warnings like:
+        # input/a/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/a/hardlink'
+        # input/b/hardlink: link: [Errno 2] No such file or directory: 'input/a/hardlink' -> 'input/b/hardlink'
+        # otherwise, when fixed, the hardlinks should be there and have a link count of 2
+        assert os.stat("input/a/hardlink").st_nlink == 2
+        assert os.stat("input/b/hardlink").st_nlink == 2
+
+
+def test_extract_include_exclude(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude=input/file4", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "input/file1")
+    assert sorted(os.listdir("output/input")) == ["file1"]
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude=input/file2")
+    assert sorted(os.listdir("output/input")) == ["file1", "file3"]
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude-from=" + archiver.exclude_file_path)
+    assert sorted(os.listdir("output/input")) == ["file1", "file3"]
+
+
+def test_extract_include_exclude_regex(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+    create_regular_file(input_path, "file333", size=1024 * 80)
+
+    # Create with regular expression exclusion for file4
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude=re:input/file4$", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2", "file3", "file333"]
+    shutil.rmtree("output/input")
+
+    # Extract with regular expression exclusion
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude=re:file3+")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2"]
+    shutil.rmtree("output/input")
+
+    # Combine --exclude with fnmatch and regular expression
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude=input/file2", "--exclude=re:file[01]")
+    assert sorted(os.listdir("output/input")) == ["file3", "file333"]
+    shutil.rmtree("output/input")
+
+    # Combine --exclude-from and regular expression exclusion
+    with changedir("output"):
+        cmd(
+            archiver,
+            f"--repo={repo_location}",
+            "extract",
+            "test",
+            "--exclude-from=" + archiver.exclude_file_path,
+            "--exclude=re:file1",
+            "--exclude=re:file(\\d)\\1\\1$",
+        )
+    assert sorted(os.listdir("output/input")) == ["file3"]
+
+
+def test_extract_include_exclude_regex_from_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+    create_regular_file(input_path, "file333", size=1024 * 80)
+    # Create while excluding using mixed pattern styles
+    with open(archiver.exclude_file_path, "wb") as fd:
+        fd.write(b"re:input/file4$\n")
+        fd.write(b"fm:*file3*\n")
+
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude-from=" + archiver.exclude_file_path, "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2"]
+    shutil.rmtree("output/input")
+
+    # Exclude using regular expression
+    with open(archiver.exclude_file_path, "wb") as fd:
+        fd.write(b"re:file3+\n")
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude-from=" + archiver.exclude_file_path)
+    assert sorted(os.listdir("output/input")) == ["file1", "file2"]
+    shutil.rmtree("output/input")
+
+    # Mixed exclude pattern styles
+    with open(archiver.exclude_file_path, "wb") as fd:
+        fd.write(b"re:file(\\d)\\1\\1$\n")
+        fd.write(b"fm:nothingwillmatchthis\n")
+        fd.write(b"*/file1\n")
+        fd.write(b"re:file2$\n")
+
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "--exclude-from=" + archiver.exclude_file_path)
+    assert sorted(os.listdir("output/input")) == []
+
+
+def test_extract_with_pattern(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file3", size=1024 * 80)
+    create_regular_file(input_path, "file4", size=1024 * 80)
+    create_regular_file(input_path, "file333", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    # Extract everything with regular expression
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "re:.*")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2", "file3", "file333", "file4"]
+    shutil.rmtree("output/input")
+
+    # Extract with pattern while also excluding files
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "--exclude=re:file[34]$", "test", r"re:file\d$")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2"]
+    shutil.rmtree("output/input")
+
+    # Combine --exclude with pattern for extraction
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "--exclude=input/file1", "test", "re:file[12]$")
+    assert sorted(os.listdir("output/input")) == ["file2"]
+    shutil.rmtree("output/input")
+
+    # Multiple pattern
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", "fm:input/file1", "fm:*file33*", "input/file2")
+    assert sorted(os.listdir("output/input")) == ["file1", "file2", "file333"]
+
+
+def test_extract_list_output(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    assert "input/file" not in output
+    shutil.rmtree("output/input")
+
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--info")
+    assert "input/file" not in output
+    shutil.rmtree("output/input")
+
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--list")
+    assert "input/file" in output
+    shutil.rmtree("output/input")
+
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--list", "--info")
+    assert "input/file" in output
+
+
+def test_extract_progress(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    with changedir("output"):
+        output = cmd(archiver, f"--repo={repo_location}", "extract", "test", "--progress")
+        assert "Extracting:" in output
+
+
+def test_extract_pattern_opt(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "file2", size=1024 * 80)
+    create_regular_file(input_path, "file_important", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(
+            archiver,
+            f"--repo={repo_location}",
+            "extract",
+            "test",
+            "--pattern=+input/file_important",
+            "--pattern=-input/file*",
+        )
+    assert sorted(os.listdir("output/input")) == ["file_important"]
+
+
+@pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason="Linux capabilities test, requires fakeroot >= 1.20.2")
+def test_extract_capabilities(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("Skipping binary test due to patch objects")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    fchown = os.fchown
+
+    # We need to patch chown manually to get the behaviour Linux has, since fakeroot does not
+    # accurately model the interaction of chown(2) and Linux capabilities, i.e. it does not remove them.
+    def patched_fchown(fd, uid, gid):
+        xattr.setxattr(fd, b"security.capability", b"", follow_symlinks=False)
+        fchown(fd, uid, gid)
+
+    # The capability descriptor used here is valid and taken from a /usr/bin/ping
+    capabilities = b"\x01\x00\x00\x02\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+    create_regular_file(input_path, "file")
+    xattr.setxattr(b"input/file", b"security.capability", capabilities)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        with patch.object(os, "fchown", patched_fchown):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        assert xattr.getxattr(b"input/file", b"security.capability") == capabilities
+
+
+@pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot")
+def test_extract_xattrs_errors(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("Skipping binary test due to patch objects")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def patched_setxattr_E2BIG(*args, **kwargs):
+        raise OSError(errno.E2BIG, "E2BIG")
+
+    def patched_setxattr_ENOTSUP(*args, **kwargs):
+        raise OSError(errno.ENOTSUP, "ENOTSUP")
+
+    def patched_setxattr_EACCES(*args, **kwargs):
+        raise OSError(errno.EACCES, "EACCES")
+
+    create_regular_file(input_path, "file")
+    xattr.setxattr(b"input/file", b"user.attribute", b"value")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "-e" "none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        input_abspath = os.path.abspath("input/file")
+        with patch.object(xattr, "setxattr", patched_setxattr_E2BIG):
+            out = cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_WARNING)
+            assert "too big for this filesystem" in out
+            assert "when setting extended attribute user.attribute" in out
+        os.remove(input_abspath)
+
+        with patch.object(xattr, "setxattr", patched_setxattr_ENOTSUP):
+            out = cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_WARNING)
+            assert "ENOTSUP" in out
+            assert "when setting extended attribute user.attribute" in out
+        os.remove(input_abspath)
+
+        with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
+            out = cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_WARNING)
+            assert "EACCES" in out
+            assert "when setting extended attribute user.attribute" in out
+        assert os.path.isfile(input_abspath)
+
+
+@pytest.mark.skipif(not is_darwin, reason="only for macOS")
+def test_extract_xattrs_resourcefork(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_regular_file(input_path, "file")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "-e" "none")
+    input_abspath = os.path.abspath("input/file")
+    xa_key, xa_value = b"com.apple.ResourceFork", b"whatshouldbehere"  # issue #7234
+    xattr.setxattr(input_abspath.encode(), xa_key, xa_value)
+    birthtime_expected = os.stat(input_abspath).st_birthtime
+    mtime_expected = os.stat(input_abspath).st_mtime_ns
+    # atime_expected = os.stat(input_abspath).st_atime_ns
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        extracted_path = os.path.abspath("input/file")
+        birthtime_extracted = os.stat(extracted_path).st_birthtime
+        mtime_extracted = os.stat(extracted_path).st_mtime_ns
+        # atime_extracted = os.stat(extracted_path).st_atime_ns
+        xa_value_extracted = xattr.getxattr(extracted_path.encode(), xa_key)
+    assert xa_value_extracted == xa_value
+    # cope with small birthtime deviations of less than 1000ns:
+    assert -1000 <= (birthtime_extracted - birthtime_expected) * 1e9 <= 1000
+    assert mtime_extracted == mtime_expected
+    # assert atime_extracted == atime_expected  # still broken, but not really important.
+
+
+def test_overwrite(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("Test_overwrite seems incompatible with fakeroot and/or the binary.")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    # Overwriting regular files and directories should be supported
+    os.mkdir("output/input")
+    os.mkdir("output/input/file1")
+    os.mkdir("output/input/dir2")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+    assert_dirs_equal("input", "output/input")
+
+    # But non-empty dirs should fail
+    os.unlink("output/input/file1")
+    os.mkdir("output/input/file1")
+    os.mkdir("output/input/file1/dir")
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=1)
+
+
+# derived from test_extract_xattrs_errors()
+@pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot")
+def test_do_not_fail_when_percent_is_in_xattr_name(archivers, request):
+    """https://github.com/borgbackup/borg/issues/6063"""
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("Skipping binary test due to patch objects")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def patched_setxattr_EACCES(*args, **kwargs):
+        raise OSError(errno.EACCES, "EACCES")
+
+    create_regular_file(input_path, "file")
+    xattr.setxattr(b"input/file", b"user.attribute%p", b"value")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "-e" "none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_WARNING)
+
+
+# derived from test_extract_xattrs_errors()
+@pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason="xattr not supported on this system or on this version of fakeroot")
+def test_do_not_fail_when_percent_is_in_file_name(archivers, request):
+    """https://github.com/borgbackup/borg/issues/6063"""
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("Skipping binary test due to patch objects")
+    repo_location = archiver.repository_location
+
+    def patched_setxattr_EACCES(*args, **kwargs):
+        raise OSError(errno.EACCES, "EACCES")
+
+    os.makedirs(os.path.join(archiver.input_path, "dir%p"))
+    xattr.setxattr(b"input/dir%p", b"user.attribute", b"value")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "-e" "none")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    with changedir("output"):
+        with patch.object(xattr, "setxattr", patched_setxattr_EACCES):
+            cmd(archiver, f"--repo={repo_location}", "extract", "test", exit_code=EXIT_WARNING)
+
+
+def test_extract_continue(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    CONTENTS1, CONTENTS2, CONTENTS3 = b"contents1" * 100, b"contents2" * 200, b"contents3" * 300
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", contents=CONTENTS1)
+    create_regular_file(input_path, "file2", contents=CONTENTS2)
+    create_regular_file(input_path, "file3", contents=CONTENTS3)
+    cmd(archiver, f"--repo={repo_location}", "create", "arch", "input")
+    with changedir("output"):
+        # we simulate an interrupted/partial extraction:
+        cmd(archiver, f"--repo={repo_location}", "extract", "arch")
+        # do not modify file1, it stands for a successfully extracted file
+        file1_st = os.stat("input/file1")
+        # simulate a partially extracted file2 (smaller size, archived mtime not yet set)
+        file2_st = os.stat("input/file2")
+        # make a hardlink, so it does not free the inode when unlinking input/file2
+        os.link("input/file2", "hardlink-to-keep-inode-f2")
+        os.truncate("input/file2", 123)  # -> incorrect size, incorrect mtime
+        # simulate file3 has not yet been extracted
+        file3_st = os.stat("input/file3")
+        # make a hardlink, so it does not free the inode when unlinking input/file3
+        os.link("input/file3", "hardlink-to-keep-inode-f3")
+        os.remove("input/file3")
+    time.sleep(1)  # needed due to timestamp granularity of apple hfs+
+
+    with changedir("output"):
+        # now try to continue extracting, using the same archive, same output dir:
+        cmd(archiver, f"--repo={repo_location}", "extract", "arch", "--continue")
+        now_file1_st = os.stat("input/file1")
+        assert file1_st.st_ino == now_file1_st.st_ino  # file1 was NOT extracted again
+        assert file1_st.st_mtime_ns == now_file1_st.st_mtime_ns  # has correct mtime
+        new_file2_st = os.stat("input/file2")
+        assert file2_st.st_ino != new_file2_st.st_ino  # file2 was extracted again
+        assert file2_st.st_mtime_ns == new_file2_st.st_mtime_ns  # has correct mtime
+        new_file3_st = os.stat("input/file3")
+        assert file3_st.st_ino != new_file3_st.st_ino  # file3 was extracted again
+        assert file3_st.st_mtime_ns == new_file3_st.st_mtime_ns  # has correct mtime
+        # windows has a strange ctime behaviour when deleting and recreating a file
+        if not is_win32:
+            assert file1_st.st_ctime_ns == now_file1_st.st_ctime_ns  # file not extracted again
+            assert file2_st.st_ctime_ns != new_file2_st.st_ctime_ns  # file extracted again
+            assert file3_st.st_ctime_ns != new_file3_st.st_ctime_ns  # file extracted again
+        # check if all contents (and thus also file sizes) are correct:
+        with open("input/file1", "rb") as f:
+            assert f.read() == CONTENTS1
+        with open("input/file2", "rb") as f:
+            assert f.read() == CONTENTS2
+        with open("input/file3", "rb") as f:
+            assert f.read() == CONTENTS3

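The is_sparse() check in test_sparse_file relies on lseek semantics: with SEEK_HOLE, seeking from offset 0 returns 0 only when the file starts with a hole, and with SEEK_DATA it returns the offset where real data begins. A standalone sketch of that probe (illustration only, not borg code; it assumes a Unix Python 3.3+ and a filesystem that actually stores holes, and the file name is made up):

    import os

    hole, payload = 1 << 20, b"payload"  # 1 MiB hole, then some real data
    with open("demo.sparse", "wb") as fd:
        fd.seek(hole)      # leave a hole at the start ...
        fd.write(payload)  # ... and write data right behind it

    with open("demo.sparse", "rb") as fd:
        try:
            starts_with_hole = fd.seek(0, os.SEEK_HOLE) == 0  # first hole at offset 0?
            data_starts_at = fd.seek(0, os.SEEK_DATA)         # where does data begin?
            print("sparse" if starts_with_hole and data_starts_at == hole else "not stored sparsely")
        except OSError:
            print("no SEEK_HOLE/SEEK_DATA support on this filesystem")
    os.remove("demo.sparse")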
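All converted tests above follow one pattern: the test takes an archivers argument that pytest fills with a fixture name (a string), and the first line resolves that string to the live fixture via request.getfixturevalue(). A minimal sketch of the hook driving this; the real generate_archiver_tests lives in src/borg/testsuite/archiver/__init__.py, and the fixture names below are assumptions for illustration:

    # sketch only, not borg's actual implementation
    def generate_archiver_tests(metafunc, kinds: str):
        # map each requested kind to the *name* of a fixture that builds that setup
        names = {"local": "archiver", "remote": "remote_archiver", "binary": "binary_archiver"}
        if "archivers" in metafunc.fixturenames:
            metafunc.parametrize("archivers", [names[kind] for kind in kinds.split(",")])

    # a module opts in with a pytest_generate_tests hook (see the converted files below),
    # and each test then resolves the parametrized name at run time:
    def test_example(archivers, request):
        archiver = request.getfixturevalue(archivers)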
+ 17 - 19
src/borg/testsuite/archiver/help_cmd.py

@@ -2,26 +2,11 @@ import pytest
 
 from ...constants import *  # NOQA
 from ...helpers.nanorst import RstToTextLazy, rst_to_terminal
-from . import ArchiverTestCaseBase, Archiver
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_usage(self):
-        self.cmd()
-        self.cmd("-h")
-
-    def test_help(self):
-        assert "Borg" in self.cmd("help")
-        assert "patterns" in self.cmd("help", "patterns")
-        assert "creates a new, empty repository" in self.cmd("help", "rcreate")
-        assert "positional arguments" not in self.cmd("help", "rcreate", "--epilog-only")
-        assert "creates a new, empty repository" not in self.cmd("help", "rcreate", "--usage-only")
+from . import Archiver, cmd
 
 
 def get_all_parsers():
-    """
-    Return dict mapping command to parser.
-    """
+    # Return dict mapping command to parser.
     parser = Archiver(prog="borg").build_parser()
     borgfs_parser = Archiver(prog="borgfs").build_parser()
     parsers = {}
@@ -30,8 +15,8 @@ def get_all_parsers():
         choices = {}
         for action in parser._actions:
             if action.choices is not None and "SubParsersAction" in str(action.__class__):
-                for cmd, parser in action.choices.items():
-                    choices[prefix + cmd] = parser
+                for command, parser in action.choices.items():
+                    choices[prefix + command] = parser
         if extra_choices is not None:
             choices.update(extra_choices)
         if prefix and not choices:
@@ -45,6 +30,19 @@ def get_all_parsers():
     return parsers
 
 
+def test_usage(archiver):
+    cmd(archiver)
+    cmd(archiver, "-h")
+
+
+def test_help(archiver):
+    assert "Borg" in cmd(archiver, "help")
+    assert "patterns" in cmd(archiver, "help", "patterns")
+    assert "creates a new, empty repository" in cmd(archiver, "help", "rcreate")
+    assert "positional arguments" not in cmd(archiver, "help", "rcreate", "--epilog-only")
+    assert "creates a new, empty repository" not in cmd(archiver, "help", "rcreate", "--usage-only")
+
+
 @pytest.mark.parametrize("command, parser", list(get_all_parsers().items()))
 def test_help_formatting(command, parser):
     if isinstance(parser.epilog, RstToTextLazy):

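test_help_formatting above turns every borg subcommand into its own pytest case by walking argparse's SubParsersAction choices, which is what get_all_parsers() does (plus prefix handling for nested subcommands). The same pattern on a self-contained toy parser, for illustration only:

    import argparse
    import pytest

    def build_toy_parser():
        parser = argparse.ArgumentParser(prog="toy")
        sub = parser.add_subparsers()
        sub.add_parser("create", help="create something")
        sub.add_parser("extract", help="extract something")
        return parser

    def get_toy_parsers():
        # same detection trick as get_all_parsers(): find the SubParsersAction
        # among the actions and collect its choices (command name -> subparser)
        return {
            name: sp
            for action in build_toy_parser()._actions
            if action.choices is not None and "SubParsersAction" in str(action.__class__)
            for name, sp in action.choices.items()
        }

    @pytest.mark.parametrize("command, parser", sorted(get_toy_parsers().items()))
    def test_toy_help_renders(command, parser):
        assert command in parser.format_usage()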
+ 46 - 54
src/borg/testsuite/archiver/info_cmd.py

@@ -1,58 +1,50 @@
 import json
 import os
-import unittest
 
 from ...constants import *  # NOQA
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RK_ENCRYPTION,
-    checkts,
-    BORG_EXES,
-)
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_info(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_archive = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-        assert "Archive name: test" + os.linesep in info_archive
-        info_archive = self.cmd(f"--repo={self.repository_location}", "info", "--first", "1")
-        assert "Archive name: test" + os.linesep in info_archive
-
-    def test_info_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-        info_archive = json.loads(self.cmd(f"--repo={self.repository_location}", "info", "-a", "test", "--json"))
-        archives = info_archive["archives"]
-        assert len(archives) == 1
-        archive = archives[0]
-        assert archive["name"] == "test"
-        assert isinstance(archive["command_line"], str)
-        assert isinstance(archive["duration"], float)
-        assert len(archive["id"]) == 64
-        assert "stats" in archive
-        checkts(archive["start"])
-        checkts(archive["end"])
-
-    def test_info_json_of_empty_archive(self):
-        """See https://github.com/borgbackup/borg/issues/6120"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        info_repo = json.loads(self.cmd(f"--repo={self.repository_location}", "info", "--json", "--first=1"))
-        assert info_repo["archives"] == []
-        info_repo = json.loads(self.cmd(f"--repo={self.repository_location}", "info", "--json", "--last=1"))
-        assert info_repo["archives"] == []
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import cmd, checkts, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_info(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_archive = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+    assert "Archive name: test" + os.linesep in info_archive
+    info_archive = cmd(archiver, f"--repo={repo_location}", "info", "--first", "1")
+    assert "Archive name: test" + os.linesep in info_archive
+
+
+def test_info_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    info_archive = json.loads(cmd(archiver, f"--repo={repo_location}", "info", "-a", "test", "--json"))
+    archives = info_archive["archives"]
+    assert len(archives) == 1
+    archive = archives[0]
+    assert archive["name"] == "test"
+    assert isinstance(archive["command_line"], str)
+    assert isinstance(archive["duration"], float)
+    assert len(archive["id"]) == 64
+    assert "stats" in archive
+    checkts(archive["start"])
+    checkts(archive["end"])
+
+
+def test_info_json_of_empty_archive(archivers, request):
+    """See https://github.com/borgbackup/borg/issues/6120"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    info_repo = json.loads(cmd(archiver, f"--repo={repo_location}", "info", "--json", "--first=1"))
+    assert info_repo["archives"] == []
+    info_repo = json.loads(cmd(archiver, f"--repo={repo_location}", "info", "--json", "--last=1"))
+    assert info_repo["archives"] == []

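test_info_json above pins down the JSON shape of borg info: an "archives" list whose entries carry name, command_line, duration, a 64-hex-digit id, stats, and parseable start/end timestamps. A minimal consumer of exactly those fields (a sketch; the repository path and archive name are placeholders):

    import json
    import subprocess

    out = subprocess.run(
        ["borg", "--repo=/path/to/repo", "info", "-a", "test", "--json"],
        capture_output=True, text=True, check=True,
    ).stdout
    archive = json.loads(out)["archives"][0]
    print(archive["name"], archive["id"][:8], f"{archive['duration']:.1f}s", archive["start"], archive["end"])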
+ 255 - 227
src/borg/testsuite/archiver/key_cmds.py

@@ -1,5 +1,4 @@
 import os
-import unittest
 from binascii import unhexlify, b2a_base64, a2b_base64
 
 import pytest
@@ -13,294 +12,323 @@ from ...helpers import msgpack
 from ...repository import Repository
 from .. import environment_variable
 from .. import key
-from . import (
-    ArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RemoteArchiverTestCaseBase,
-    RK_ENCRYPTION,
-    KF_ENCRYPTION,
-    BORG_EXES,
-)
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_change_passphrase(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        os.environ["BORG_NEW_PASSPHRASE"] = "newpassphrase"
-        # here we have both BORG_PASSPHRASE and BORG_NEW_PASSPHRASE set:
-        self.cmd(f"--repo={self.repository_location}", "key", "change-passphrase")
-        os.environ["BORG_PASSPHRASE"] = "newpassphrase"
-        self.cmd(f"--repo={self.repository_location}", "rlist")
-
-    def test_change_location_to_keyfile(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(repokey" in log
-        self.cmd(f"--repo={self.repository_location}", "key", "change-location", "keyfile")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(key file" in log
+from . import RK_ENCRYPTION, KF_ENCRYPTION, cmd, _extract_repository_id, _set_repository_id, generate_archiver_tests
 
-    def test_change_location_to_b2keyfile(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=repokey-blake2-aes-ocb")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(repokey BLAKE2b" in log
-        self.cmd(f"--repo={self.repository_location}", "key", "change-location", "keyfile")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(key file BLAKE2b" in log
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
-    def test_change_location_to_repokey(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(key file" in log
-        self.cmd(f"--repo={self.repository_location}", "key", "change-location", "repokey")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(repokey" in log
 
-    def test_change_location_to_b2repokey(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=keyfile-blake2-aes-ocb")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(key file BLAKE2b" in log
-        self.cmd(f"--repo={self.repository_location}", "key", "change-location", "repokey")
-        log = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "(repokey BLAKE2b" in log
+def test_change_passphrase(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    os.environ["BORG_NEW_PASSPHRASE"] = "newpassphrase"
+    # here we have both BORG_PASSPHRASE and BORG_NEW_PASSPHRASE set:
+    cmd(archiver, f"--repo={repo_location}", "key", "change-passphrase")
+    os.environ["BORG_PASSPHRASE"] = "newpassphrase"
+    cmd(archiver, f"--repo={repo_location}", "rlist")
 
-    def test_key_export_keyfile(self):
-        export_file = self.output_path + "/exported"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
-        repo_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "key", "export", export_file)
 
-        with open(export_file) as fd:
-            export_contents = fd.read()
+def test_change_location_to_keyfile(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(repokey" in log
+    cmd(archiver, f"--repo={repo_location}", "key", "change-location", "keyfile")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(key file" in log
 
-        assert export_contents.startswith("BORG_KEY " + bin_to_hex(repo_id) + "\n")
 
-        key_file = self.keys_path + "/" + os.listdir(self.keys_path)[0]
+def test_change_location_to_b2keyfile(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=repokey-blake2-aes-ocb")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(repokey BLAKE2b" in log
+    cmd(archiver, f"--repo={repo_location}", "key", "change-location", "keyfile")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(key file BLAKE2b" in log
 
 
-        with open(key_file) as fd:
-            key_contents = fd.read()
 
 
-        assert key_contents == export_contents
+def test_change_location_to_repokey(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(key file" in log
+    cmd(archiver, f"--repo={repo_location}", "key", "change-location", "repokey")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(repokey" in log
 
 
-        os.unlink(key_file)
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "import", export_file)
+def test_change_location_to_b2repokey(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=keyfile-blake2-aes-ocb")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(key file BLAKE2b" in log
+    cmd(archiver, f"--repo={repo_location}", "key", "change-location", "repokey")
+    log = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "(repokey BLAKE2b" in log
 
 
-        with open(key_file) as fd:
-            key_contents2 = fd.read()
 
 
-        assert key_contents2 == key_contents
+def test_key_export_keyfile(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, keys_path = archiver.repository_location, archiver.repository_path, archiver.keys_path
+    export_file = archiver.output_path + "/exported"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
+    repo_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "key", "export", export_file)
 
 
-    def test_key_import_keyfile_with_borg_key_file(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
+    with open(export_file) as fd:
+        export_contents = fd.read()
 
 
-        exported_key_file = os.path.join(self.output_path, "exported")
-        self.cmd(f"--repo={self.repository_location}", "key", "export", exported_key_file)
+    assert export_contents.startswith("BORG_KEY " + bin_to_hex(repo_id) + "\n")
 
 
-        key_file = os.path.join(self.keys_path, os.listdir(self.keys_path)[0])
-        with open(key_file) as fd:
-            key_contents = fd.read()
-        os.unlink(key_file)
+    key_file = keys_path + "/" + os.listdir(keys_path)[0]
 
 
-        imported_key_file = os.path.join(self.output_path, "imported")
-        with environment_variable(BORG_KEY_FILE=imported_key_file):
-            self.cmd(f"--repo={self.repository_location}", "key", "import", exported_key_file)
-        assert not os.path.isfile(key_file), '"borg key import" should respect BORG_KEY_FILE'
+    with open(key_file) as fd:
+        key_contents = fd.read()
 
 
-        with open(imported_key_file) as fd:
-            imported_key_contents = fd.read()
-        assert imported_key_contents == key_contents
+    assert key_contents == export_contents
 
 
-    def test_key_export_repokey(self):
-        export_file = self.output_path + "/exported"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repo_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "key", "export", export_file)
+    os.unlink(key_file)
 
 
-        with open(export_file) as fd:
-            export_contents = fd.read()
+    cmd(archiver, f"--repo={repo_location}", "key", "import", export_file)
 
 
-        assert export_contents.startswith("BORG_KEY " + bin_to_hex(repo_id) + "\n")
+    with open(key_file) as fd:
+        key_contents2 = fd.read()
 
 
-        with Repository(self.repository_path) as repository:
-            repo_key = AESOCBRepoKey(repository)
-            repo_key.load(None, Passphrase.env_passphrase())
+    assert key_contents2 == key_contents
 
 
-        backup_key = AESOCBKeyfileKey(key.TestKey.MockRepository())
-        backup_key.load(export_file, Passphrase.env_passphrase())
 
 
-        assert repo_key.crypt_key == backup_key.crypt_key
+def test_key_import_keyfile_with_borg_key_file(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, keys_path, output_path = archiver.repository_location, archiver.keys_path, archiver.output_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
 
 
-        with Repository(self.repository_path) as repository:
-            repository.save_key(b"")
+    exported_key_file = os.path.join(output_path, "exported")
+    cmd(archiver, f"--repo={repo_location}", "key", "export", exported_key_file)
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "import", export_file)
+    key_file = os.path.join(keys_path, os.listdir(keys_path)[0])
+    with open(key_file) as fd:
+        key_contents = fd.read()
+    os.unlink(key_file)
 
 
-        with Repository(self.repository_path) as repository:
-            repo_key2 = AESOCBRepoKey(repository)
-            repo_key2.load(None, Passphrase.env_passphrase())
+    imported_key_file = os.path.join(output_path, "imported")
+    with environment_variable(BORG_KEY_FILE=imported_key_file):
+        cmd(archiver, f"--repo={repo_location}", "key", "import", exported_key_file)
+    assert not os.path.isfile(key_file), '"borg key import" should respect BORG_KEY_FILE'
 
 
-        assert repo_key2.crypt_key == repo_key2.crypt_key
+    with open(imported_key_file) as fd:
+        imported_key_contents = fd.read()
+    assert imported_key_contents == key_contents
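
The import above relies on the `environment_variable` helper to point `BORG_KEY_FILE` somewhere else only for the duration of the `key import` call. A minimal sketch of such a context manager, assuming it simply saves and restores the affected variables:

    import os
    from contextlib import contextmanager

    @contextmanager
    def environment_variable(**variables):
        # remember the previous values so they can be restored afterwards
        old = {name: os.environ.get(name) for name in variables}
        os.environ.update(variables)
        try:
            yield
        finally:
            # restore previous values, unsetting variables that did not exist
            for name, value in old.items():
                if value is None:
                    os.environ.pop(name, None)
                else:
                    os.environ[name] = value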
 
 
-    def test_key_export_qr(self):
-        export_file = self.output_path + "/exported.html"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        repo_id = self._extract_repository_id(self.repository_path)
-        self.cmd(f"--repo={self.repository_location}", "key", "export", "--qr-html", export_file)
 
 
-        with open(export_file, encoding="utf-8") as fd:
-            export_contents = fd.read()
+def test_key_export_repokey(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, output_path = archiver.repository_location, archiver.repository_path, archiver.output_path
+    export_file = output_path + "/exported"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repo_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "key", "export", export_file)
 
 
-        assert bin_to_hex(repo_id) in export_contents
-        assert export_contents.startswith("<!doctype html>")
-        assert export_contents.endswith("</html>\n")
+    with open(export_file) as fd:
+        export_contents = fd.read()
 
 
-    def test_key_export_directory(self):
-        export_directory = self.output_path + "/exported"
-        os.mkdir(export_directory)
+    assert export_contents.startswith("BORG_KEY " + bin_to_hex(repo_id) + "\n")
 
 
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
+    with Repository(repo_path) as repository:
+        repo_key = AESOCBRepoKey(repository)
+        repo_key.load(None, Passphrase.env_passphrase())
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "export", export_directory, exit_code=EXIT_ERROR)
+    backup_key = AESOCBKeyfileKey(key.TestKey.MockRepository())
+    backup_key.load(export_file, Passphrase.env_passphrase())
 
 
-    def test_key_export_qr_directory(self):
-        export_directory = self.output_path + "/exported"
-        os.mkdir(export_directory)
+    assert repo_key.crypt_key == backup_key.crypt_key
 
 
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
+    with Repository(repo_path) as repository:
+        repository.save_key(b"")
 
 
-        self.cmd(
-            f"--repo={self.repository_location}", "key", "export", "--qr-html", export_directory, exit_code=EXIT_ERROR
-        )
+    cmd(archiver, f"--repo={repo_location}", "key", "import", export_file)
 
 
-    def test_key_import_errors(self):
-        export_file = self.output_path + "/exported"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
+    with Repository(repo_path) as repository:
+        repo_key2 = AESOCBRepoKey(repository)
+        repo_key2.load(None, Passphrase.env_passphrase())
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "import", export_file, exit_code=EXIT_ERROR)
+    assert repo_key2.crypt_key == repo_key.crypt_key  # re-imported key must match the original
 
 
-        with open(export_file, "w") as fd:
-            fd.write("something not a key\n")
 
 
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "key", "import", export_file, exit_code=2)
-        else:
-            with pytest.raises(NotABorgKeyFile):
-                self.cmd(f"--repo={self.repository_location}", "key", "import", export_file)
+def test_key_export_qr(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, output_path = archiver.repository_location, archiver.repository_path, archiver.output_path
+    export_file = output_path + "/exported.html"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    repo_id = _extract_repository_id(repo_path)
+    cmd(archiver, f"--repo={repo_location}", "key", "export", "--qr-html", export_file)
 
 
-        with open(export_file, "w") as fd:
-            fd.write("BORG_KEY a0a0a0\n")
+    with open(export_file, encoding="utf-8") as fd:
+        export_contents = fd.read()
 
 
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}", "key", "import", export_file, exit_code=2)
-        else:
-            with pytest.raises(RepoIdMismatch):
-                self.cmd(f"--repo={self.repository_location}", "key", "import", export_file)
+    assert bin_to_hex(repo_id) in export_contents
+    assert export_contents.startswith("<!doctype html>")
+    assert export_contents.endswith("</html>\n")
 
 
-    def test_key_export_paperkey(self):
-        repo_id = "e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239"
 
 
-        export_file = self.output_path + "/exported"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
-        self._set_repository_id(self.repository_path, unhexlify(repo_id))
+def test_key_export_directory(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, output_path = archiver.repository_location, archiver.output_path
+    export_directory = output_path + "/exported"
+    os.mkdir(export_directory)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "key", "export", export_directory, exit_code=EXIT_ERROR)
 
 
-        key_file = self.keys_path + "/" + os.listdir(self.keys_path)[0]
 
 
-        with open(key_file, "w") as fd:
-            fd.write(CHPOKeyfileKey.FILE_ID + " " + repo_id + "\n")
-            fd.write(b2a_base64(b"abcdefghijklmnopqrstu").decode())
+def test_key_export_qr_directory(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, output_path = archiver.repository_location, archiver.output_path
+    export_directory = output_path + "/exported"
+    os.mkdir(export_directory)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "key", "export", "--qr-html", export_directory, exit_code=EXIT_ERROR)
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "export", "--paper", export_file)
 
 
-        with open(export_file) as fd:
-            export_contents = fd.read()
+def test_key_import_errors(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, output_path = archiver.repository_location, archiver.output_path
+    export_file = output_path + "/exported"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
 
 
-        assert (
-            export_contents
-            == """To restore key use borg key import --paper /path/to/repo
+    cmd(archiver, f"--repo={repo_location}", "key", "import", export_file, exit_code=EXIT_ERROR)
+
+    with open(export_file, "w") as fd:
+        fd.write("something not a key\n")
+
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "key", "import", export_file, exit_code=2)
+    else:
+        with pytest.raises(NotABorgKeyFile):
+            cmd(archiver, f"--repo={repo_location}", "key", "import", export_file)
+
+    with open(export_file, "w") as fd:
+        fd.write("BORG_KEY a0a0a0\n")
+
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}", "key", "import", export_file, exit_code=2)
+    else:
+        with pytest.raises(RepoIdMismatch):
+            cmd(archiver, f"--repo={repo_location}", "key", "import", export_file)
+
+
+def test_key_export_paperkey(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, output_path = archiver.repository_location, archiver.repository_path, archiver.output_path
+    repo_id = "e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239"
+
+    export_file = output_path + "/exported"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
+    _set_repository_id(repo_path, unhexlify(repo_id))
+
+    key_file = archiver.keys_path + "/" + os.listdir(archiver.keys_path)[0]
+
+    with open(key_file, "w") as fd:
+        fd.write(CHPOKeyfileKey.FILE_ID + " " + repo_id + "\n")
+        fd.write(b2a_base64(b"abcdefghijklmnopqrstu").decode())
+
+    cmd(archiver, f"--repo={repo_location}", "key", "export", "--paper", export_file)
+
+    with open(export_file) as fd:
+        export_contents = fd.read()
+
+    assert (
+        export_contents
+        == """To restore key use borg key import --paper /path/to/repo
 
 
 BORG PAPER KEY v1
 id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
  1: 616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d
  2: 737475 - 88
 """
-        )
-
-    def test_key_import_paperkey(self):
-        repo_id = "e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239"
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
-        self._set_repository_id(self.repository_path, unhexlify(repo_id))
-
-        key_file = self.keys_path + "/" + os.listdir(self.keys_path)[0]
-        with open(key_file, "w") as fd:
-            fd.write(AESOCBKeyfileKey.FILE_ID + " " + repo_id + "\n")
-            fd.write(b2a_base64(b"abcdefghijklmnopqrstu").decode())
-
-        typed_input = (
-            b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41  02\n"  # Forgot to type "-"
-            b"2 / e29442 3506da 4e1ea7  25f62a 5a3d41 - 02\n"  # Forgot to type second "/"
-            b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d42 - 02\n"  # Typo (..42 not ..41)
-            b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n"  # Correct! Congratulations
-            b"616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n"
-            b"\n\n"  # Abort [yN] => N
-            b"737475 88\n"  # missing "-"
-            b"73747i - 88\n"  # typo
-            b"73747 - 88\n"  # missing nibble
-            b"73 74 75  -  89\n"  # line checksum mismatch
-            b"00a1 - 88\n"  # line hash collision - overall hash mismatch, have to start over
-            b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n"
-            b"616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n"
-            b"73 74 75  -  88\n"
-        )
-
-        # In case that this has to change, here is a quick way to find a colliding line hash:
-        #
-        # from hashlib import sha256
-        # hash_fn = lambda x: sha256(b'\x00\x02' + x).hexdigest()[:2]
-        # for i in range(1000):
-        #     if hash_fn(i.to_bytes(2, byteorder='big')) == '88':  # 88 = line hash
-        #         print(i.to_bytes(2, 'big'))
-        #         break
-
-        self.cmd(f"--repo={self.repository_location}", "key", "import", "--paper", input=typed_input)
-
-        # Test abort paths
-        typed_input = b"\ny\n"
-        self.cmd(f"--repo={self.repository_location}", "key", "import", "--paper", input=typed_input)
-        typed_input = b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n\ny\n"
-        self.cmd(f"--repo={self.repository_location}", "key", "import", "--paper", input=typed_input)
-
-    def test_init_defaults_to_argon2(self):
-        """https://github.com/borgbackup/borg/issues/747#issuecomment-1076160401"""
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        with Repository(self.repository_path) as repository:
-            key = msgpack.unpackb(a2b_base64(repository.load_key()))
+    )
+
+
+def test_key_import_paperkey(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, keys_path = archiver.repository_location, archiver.repository_path, archiver.keys_path
+    repo_id = "e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
+    _set_repository_id(repo_path, unhexlify(repo_id))
+
+    key_file = keys_path + "/" + os.listdir(keys_path)[0]
+    with open(key_file, "w") as fd:
+        fd.write(AESOCBKeyfileKey.FILE_ID + " " + repo_id + "\n")
+        fd.write(b2a_base64(b"abcdefghijklmnopqrstu").decode())
+
+    typed_input = (
+        b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41  02\n"  # Forgot to type "-"
+        b"2 / e29442 3506da 4e1ea7  25f62a 5a3d41 - 02\n"  # Forgot to type second "/"
+        b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d42 - 02\n"  # Typo (..42 not ..41)
+        b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n"  # Correct! Congratulations
+        b"616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n"
+        b"\n\n"  # Abort [yN] => N
+        b"737475 88\n"  # missing "-"
+        b"73747i - 88\n"  # typo
+        b"73747 - 88\n"  # missing nibble
+        b"73 74 75  -  89\n"  # line checksum mismatch
+        b"00a1 - 88\n"  # line hash collision - overall hash mismatch, have to start over
+        b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n"
+        b"616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d\n"
+        b"73 74 75  -  88\n"
+    )
+
+    # In case that this has to change, here is a quick way to find a colliding line hash:
+    #
+    # from hashlib import sha256
+    # hash_fn = lambda x: sha256(b'\x00\x02' + x).hexdigest()[:2]
+    # for i in range(1000):
+    #     if hash_fn(i.to_bytes(2, byteorder='big')) == '88':  # 88 = line hash
+    #         print(i.to_bytes(2, 'big'))
+    #         break
+
+    cmd(archiver, f"--repo={repo_location}", "key", "import", "--paper", input=typed_input)
+
+    # Test abort paths
+    typed_input = b"\ny\n"
+    cmd(archiver, f"--repo={repo_location}", "key", "import", "--paper", input=typed_input)
+    typed_input = b"2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02\n\ny\n"
+    cmd(archiver, f"--repo={repo_location}", "key", "import", "--paper", input=typed_input)
+
+
+def test_init_defaults_to_argon2(archivers, request):
+    """https://github.com/borgbackup/borg/issues/747#issuecomment-1076160401"""
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    with Repository(repo_path) as repository:
+        key = msgpack.unpackb(a2b_base64(repository.load_key()))
         assert key["algorithm"] == "argon2 chacha20-poly1305"
 
 
-    def test_change_passphrase_does_not_change_algorithm_argon2(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        os.environ["BORG_NEW_PASSPHRASE"] = "newpassphrase"
 
 
-        self.cmd(f"--repo={self.repository_location}", "key", "change-passphrase")
+def test_change_passphrase_does_not_change_algorithm_argon2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
 
 
-        with Repository(self.repository_path) as repository:
-            key = msgpack.unpackb(a2b_base64(repository.load_key()))
-            assert key["algorithm"] == "argon2 chacha20-poly1305"
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    os.environ["BORG_NEW_PASSPHRASE"] = "newpassphrase"
+    cmd(archiver, f"--repo={repo_location}", "key", "change-passphrase")
 
 
-    def test_change_location_does_not_change_algorithm_argon2(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", KF_ENCRYPTION)
-
-        self.cmd(f"--repo={self.repository_location}", "key", "change-location", "repokey")
-
-        with Repository(self.repository_path) as repository:
-            key = msgpack.unpackb(a2b_base64(repository.load_key()))
-            assert key["algorithm"] == "argon2 chacha20-poly1305"
+    with Repository(repo_path) as repository:
+        key = msgpack.unpackb(a2b_base64(repository.load_key()))
+        assert key["algorithm"] == "argon2 chacha20-poly1305"


-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
+def test_change_location_does_not_change_algorithm_argon2(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
 
 
+    cmd(archiver, f"--repo={repo_location}", "rcreate", KF_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "key", "change-location", "repokey")
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+    with Repository(repo_path) as repository:
+        key = msgpack.unpackb(a2b_base64(repository.load_key()))
+        assert key["algorithm"] == "argon2 chacha20-poly1305"

+ 72 - 75
src/borg/testsuite/archiver/list_cmd.py

@@ -1,91 +1,88 @@
 import json
 import os
-import unittest
 
 
 from ...constants import *  # NOQA
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    src_dir,
-    RK_ENCRYPTION,
-    BORG_EXES,
-)
+from . import src_dir, cmd, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
 
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_list_format(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", src_dir)
-        output_1 = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        output_2 = self.cmd(
-            f"--repo={self.repository_location}",
-            "list",
-            "test",
-            "--format",
-            "{mode} {user:6} {group:6} {size:8d} {mtime} {path}{extra}{NEWLINE}",
-        )
-        output_3 = self.cmd(f"--repo={self.repository_location}", "list", "test", "--format", "{mtime:%s} {path}{NL}")
-        self.assertEqual(output_1, output_2)
-        self.assertNotEqual(output_1, output_3)
 
 
-    def test_list_hash(self):
-        self.create_regular_file("empty_file", size=0)
-        self.create_regular_file("amb", contents=b"a" * 1000000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test", "--format", "{sha256} {path}{NL}")
-        assert "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0 input/amb" in output
-        assert "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 input/empty_file" in output
+def test_list_format(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", src_dir)
+    output_1 = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    output_2 = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "list",
+        "test",
+        "--format",
+        "{mode} {user:6} {group:6} {size:8d} {mtime} {path}{extra}{NEWLINE}",
+    )
+    output_3 = cmd(archiver, f"--repo={repo_location}", "list", "test", "--format", "{mtime:%s} {path}{NL}")
+    assert output_1 == output_2
+    assert output_1 != output_3
 
 
-    def test_list_chunk_counts(self):
-        self.create_regular_file("empty_file", size=0)
-        self.create_regular_file("two_chunks")
-        with open(os.path.join(self.input_path, "two_chunks"), "wb") as fd:
-            fd.write(b"abba" * 2000000)
-            fd.write(b"baab" * 2000000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "--format", "{num_chunks} {unique_chunks} {path}{NL}"
-        )
-        assert "0 0 input/empty_file" in output
-        assert "2 2 input/two_chunks" in output
 
 
-    def test_list_size(self):
-        self.create_regular_file("compressible_file", size=10000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "-C", "lz4", "test", "input")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test", "--format", "{size} {path}{NL}")
-        size, path = output.split("\n")[1].split(" ")
-        assert int(size) == 10000
+def test_list_hash(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "empty_file", size=0)
+    create_regular_file(input_path, "amb", contents=b"a" * 1000000)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "--format", "{sha256} {path}{NL}")
+    assert "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0 input/amb" in output
+    assert "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 input/empty_file" in output
 
 
-    def test_list_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
 
 
-        list_archive = self.cmd(f"--repo={self.repository_location}", "list", "test", "--json-lines")
-        items = [json.loads(s) for s in list_archive.splitlines()]
-        assert len(items) == 2
-        file1 = items[1]
-        assert file1["path"] == "input/file1"
-        assert file1["size"] == 81920
+def test_list_chunk_counts(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "empty_file", size=0)
+    create_regular_file(input_path, "two_chunks")
+    with open(os.path.join(input_path, "two_chunks"), "wb") as fd:
+        fd.write(b"abba" * 2000000)
+        fd.write(b"baab" * 2000000)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    output = cmd(
+        archiver, f"--repo={repo_location}", "list", "test", "--format", "{num_chunks} {unique_chunks} {path}{NL}"
+    )
+    assert "0 0 input/empty_file" in output
+    assert "2 2 input/two_chunks" in output
 
 
-        list_archive = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "--json-lines", "--format={sha256}"
-        )
-        items = [json.loads(s) for s in list_archive.splitlines()]
-        assert len(items) == 2
-        file1 = items[1]
-        assert file1["path"] == "input/file1"
-        assert file1["sha256"] == "b2915eb69f260d8d3c25249195f2c8f4f716ea82ec760ae929732c0262442b2b"
 
 
+def test_list_size(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "compressible_file", size=10000)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "-C", "lz4", "test", "input")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "--format", "{size} {path}{NL}")
+    size, path = output.split("\n")[1].split(" ")
+    assert int(size) == 10000
 
 
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
 
 
+def test_list_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+    list_archive = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines")
+    items = [json.loads(s) for s in list_archive.splitlines()]
+    assert len(items) == 2
+    file1 = items[1]
+    assert file1["path"] == "input/file1"
+    assert file1["size"] == 81920
+
+    list_archive = cmd(archiver, f"--repo={repo_location}", "list", "test", "--json-lines", "--format={sha256}")
+    items = [json.loads(s) for s in list_archive.splitlines()]
+    assert len(items) == 2
+    file1 = items[1]
+    assert file1["path"] == "input/file1"
+    assert file1["sha256"] == "b2915eb69f260d8d3c25249195f2c8f4f716ea82ec760ae929732c0262442b2b"
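
All of the tests above build their workload with the module-level `create_regular_file(input_path, name, ...)` helper, the functional replacement for the old `self.create_regular_file(...)`. Judging by the call sites (`size=` writes that many bytes, `contents=` writes an exact byte string), a sketch could look like this; the filler byte is an assumption:

    import os

    def create_regular_file(input_path, name, size=0, contents=None):
        # create parent directories if needed, then write either the given
        # bytes or `size` filler bytes (filler byte chosen arbitrarily here)
        filename = os.path.join(input_path, name)
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        with open(filename, "wb") as fd:
            fd.write(contents if contents is not None else b"X" * size)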

+ 14 - 18
src/borg/testsuite/archiver/lock_cmds.py

@@ -1,26 +1,22 @@
 import os
-import unittest
 
 
 from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
+from . import cmd, generate_archiver_tests, RK_ENCRYPTION
 
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_break_lock(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "break-lock")
 
 
-    def test_with_lock(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        lock_path = os.path.join(self.repository_path, "lock.exclusive")
-        cmd = "python3", "-c", 'import os, sys; sys.exit(42 if os.path.exists("%s") else 23)' % lock_path
-        self.cmd(f"--repo={self.repository_location}", "with-lock", *cmd, fork=True, exit_code=42)
+def test_break_lock(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "break-lock")


-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+def test_with_lock(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    lock_path = os.path.join(repo_path, "lock.exclusive")
+    command = "python3", "-c", 'import os, sys; sys.exit(42 if os.path.exists("%s") else 23)' % lock_path
+    cmd(archiver, f"--repo={repo_location}", "with-lock", *command, fork=True, exit_code=42)

+ 322 - 320
src/borg/testsuite/archiver/mount_cmds.py

@@ -2,7 +2,6 @@ import errno
 import os
 import stat
 import sys
-import unittest
 
 
 import pytest
 
 
@@ -14,347 +13,350 @@ from .. import has_lchflags, llfuse
 from .. import changedir, no_selinux, same_ts_ns
 from .. import are_symlinks_supported, are_hardlinks_supported, are_fifos_supported
 from ..platform import fakeroot_detected
-from . import ArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RemoteArchiverTestCaseBase, RK_ENCRYPTION, BORG_EXES
-from . import src_file, requires_hardlinks
+from . import RK_ENCRYPTION, cmd, assert_dirs_equal, create_regular_file, create_src_archive, open_archive, src_file
+from . import requires_hardlinks, _extract_hardlinks_setup, fuse_mount, create_test_files, generate_archiver_tests
 
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    @requires_hardlinks
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_fuse_mount_hardlinks(self):
-        self._extract_hardlinks_setup()
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        # we need to get rid of permissions checking because fakeroot causes issues with it.
-        # On all platforms, borg defaults to "default_permissions" and we need to get rid of it via "ignore_permissions".
-        # On macOS (darwin), we additionally need "defer_permissions" to switch off the checks in osxfuse.
-        if sys.platform == "darwin":
-            ignore_perms = ["-o", "ignore_permissions,defer_permissions"]
+
+@requires_hardlinks
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_fuse_mount_hardlinks(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _extract_hardlinks_setup(archiver)
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    # we need to get rid of permissions checking because fakeroot causes issues with it.
+    # On all platforms, borg defaults to "default_permissions" and we need to get rid of it via "ignore_permissions".
+    # On macOS (darwin), we additionally need "defer_permissions" to switch off the checks in osxfuse.
+    if sys.platform == "darwin":
+        ignore_perms = ["-o", "ignore_permissions,defer_permissions"]
+    else:
+        ignore_perms = ["-o", "ignore_permissions"]
+    with fuse_mount(
+        archiver, repo_location, mountpoint, "-a", "test", "--strip-components=2", *ignore_perms
+    ), changedir(os.path.join(mountpoint, "test")):
+        assert os.stat("hardlink").st_nlink == 2
+        assert os.stat("subdir/hardlink").st_nlink == 2
+        assert open("subdir/hardlink", "rb").read() == b"123456"
+        assert os.stat("aaaa").st_nlink == 2
+        assert os.stat("source2").st_nlink == 2
+    with fuse_mount(archiver, repo_location, mountpoint, "input/dir1", "-a", "test", *ignore_perms), changedir(
+        os.path.join(mountpoint, "test")
+    ):
+        assert os.stat("input/dir1/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
+        assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
+        assert os.stat("input/dir1/aaaa").st_nlink == 2
+        assert os.stat("input/dir1/source2").st_nlink == 2
+    with fuse_mount(archiver, repo_location, mountpoint, "-a", "test", *ignore_perms), changedir(
+        os.path.join(mountpoint, "test")
+    ):
+        assert os.stat("input/source").st_nlink == 4
+        assert os.stat("input/abba").st_nlink == 4
+        assert os.stat("input/dir1/hardlink").st_nlink == 4
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 4
+        assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_fuse(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE and fakeroot_detected():
+        pytest.skip("test_fuse with the binary is not compatible with fakeroot")
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def has_noatime(some_file):
+        atime_before = os.stat(some_file).st_atime_ns
+        try:
+            os.close(os.open(some_file, flags_noatime))
+        except PermissionError:
+            return False
         else:
-            ignore_perms = ["-o", "ignore_permissions"]
-        with self.fuse_mount(
-            self.repository_location, mountpoint, "-a", "test", "--strip-components=2", *ignore_perms
-        ), changedir(os.path.join(mountpoint, "test")):
-            assert os.stat("hardlink").st_nlink == 2
-            assert os.stat("subdir/hardlink").st_nlink == 2
-            assert open("subdir/hardlink", "rb").read() == b"123456"
-            assert os.stat("aaaa").st_nlink == 2
-            assert os.stat("source2").st_nlink == 2
-        with self.fuse_mount(
-            self.repository_location, mountpoint, "input/dir1", "-a", "test", *ignore_perms
-        ), changedir(os.path.join(mountpoint, "test")):
-            assert os.stat("input/dir1/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
-            assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
-            assert os.stat("input/dir1/aaaa").st_nlink == 2
-            assert os.stat("input/dir1/source2").st_nlink == 2
-        with self.fuse_mount(self.repository_location, mountpoint, "-a", "test", *ignore_perms), changedir(
-            os.path.join(mountpoint, "test")
-        ):
-            assert os.stat("input/source").st_nlink == 4
-            assert os.stat("input/abba").st_nlink == 4
-            assert os.stat("input/dir1/hardlink").st_nlink == 4
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 4
-            assert open("input/dir1/subdir/hardlink", "rb").read() == b"123456"
+            atime_after = os.stat(some_file).st_atime_ns
+            noatime_used = flags_noatime != flags_normal
+            return noatime_used and atime_before == atime_after
 
 
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_fuse(self):
-        def has_noatime(some_file):
-            atime_before = os.stat(some_file).st_atime_ns
-            try:
-                os.close(os.open(some_file, flags_noatime))
-            except PermissionError:
-                return False
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_test_files(input_path)
+    have_noatime = has_noatime("input/file1")
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "--atime", "archive", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "--exclude-nodump", "--atime", "archive2", "input")
+    if has_lchflags:
+        # remove the file that we did not back up, so input and output become equal
+        os.remove(os.path.join("input", "flagfile"))
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    # mount the whole repository, archive contents shall show up in archivename subdirectories of mountpoint:
+    with fuse_mount(archiver, repo_location, mountpoint):
+        # flags are not supported by the FUSE mount
+        # we also ignore xattrs here, they are tested separately
+        assert_dirs_equal(
+            input_path, os.path.join(mountpoint, "archive", "input"), ignore_flags=True, ignore_xattrs=True
+        )
+        assert_dirs_equal(
+            input_path, os.path.join(mountpoint, "archive2", "input"), ignore_flags=True, ignore_xattrs=True
+        )
+    with fuse_mount(archiver, repo_location, mountpoint, "-a", "archive"):
+        assert_dirs_equal(
+            input_path, os.path.join(mountpoint, "archive", "input"), ignore_flags=True, ignore_xattrs=True
+        )
+        # regular file
+        in_fn = "input/file1"
+        out_fn = os.path.join(mountpoint, "archive", "input", "file1")
+        # stat
+        sti1 = os.stat(in_fn)
+        sto1 = os.stat(out_fn)
+        assert sti1.st_mode == sto1.st_mode
+        assert sti1.st_uid == sto1.st_uid
+        assert sti1.st_gid == sto1.st_gid
+        assert sti1.st_size == sto1.st_size
+        if have_noatime:
+            assert same_ts_ns(sti1.st_atime * 1e9, sto1.st_atime * 1e9)
+        assert same_ts_ns(sti1.st_ctime * 1e9, sto1.st_ctime * 1e9)
+        assert same_ts_ns(sti1.st_mtime * 1e9, sto1.st_mtime * 1e9)
+        if are_hardlinks_supported():
+            # note: there is another hardlink to this, see below
+            assert sti1.st_nlink == sto1.st_nlink == 2
+        # read
+        with open(in_fn, "rb") as in_f, open(out_fn, "rb") as out_f:
+            assert in_f.read() == out_f.read()
+        # hardlink (to 'input/file1')
+        if are_hardlinks_supported():
+            in_fn = "input/hardlink"
+            out_fn = os.path.join(mountpoint, "archive", "input", "hardlink")
+            sti2 = os.stat(in_fn)
+            sto2 = os.stat(out_fn)
+            assert sti2.st_nlink == sto2.st_nlink == 2
+            assert sto1.st_ino == sto2.st_ino
+        # symlink
+        if are_symlinks_supported():
+            in_fn = "input/link1"
+            out_fn = os.path.join(mountpoint, "archive", "input", "link1")
+            sti = os.stat(in_fn, follow_symlinks=False)
+            sto = os.stat(out_fn, follow_symlinks=False)
+            assert sti.st_size == len("somewhere")
+            assert sto.st_size == len("somewhere")
+            assert stat.S_ISLNK(sti.st_mode)
+            assert stat.S_ISLNK(sto.st_mode)
+            assert os.readlink(in_fn) == os.readlink(out_fn)
+        # FIFO
+        if are_fifos_supported():
+            out_fn = os.path.join(mountpoint, "archive", "input", "fifo1")
+            sto = os.stat(out_fn)
+            assert stat.S_ISFIFO(sto.st_mode)
+        # list/read xattrs
+        try:
+            in_fn = "input/fusexattr"
+            out_fn = os.fsencode(os.path.join(mountpoint, "archive", "input", "fusexattr"))
+            if not xattr.XATTR_FAKEROOT and xattr.is_enabled(input_path):
+                assert sorted(no_selinux(xattr.listxattr(out_fn))) == [b"user.empty", b"user.foo"]
+                assert xattr.getxattr(out_fn, b"user.foo") == b"bar"
+                assert xattr.getxattr(out_fn, b"user.empty") == b""
             else:
-                atime_after = os.stat(some_file).st_atime_ns
-                noatime_used = flags_noatime != flags_normal
-                return noatime_used and atime_before == atime_after
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_test_files()
-        have_noatime = has_noatime("input/file1")
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude-nodump", "--atime", "archive", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "--exclude-nodump", "--atime", "archive2", "input")
-        if has_lchflags:
-            # remove the file we did not backup, so input and output become equal
-            os.remove(os.path.join("input", "flagfile"))
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        # mount the whole repository, archive contents shall show up in archivename subdirs of mountpoint:
-        with self.fuse_mount(self.repository_location, mountpoint):
-            # flags are not supported by the FUSE mount
-            # we also ignore xattrs here, they are tested separately
-            self.assert_dirs_equal(
-                self.input_path, os.path.join(mountpoint, "archive", "input"), ignore_flags=True, ignore_xattrs=True
-            )
-            self.assert_dirs_equal(
-                self.input_path, os.path.join(mountpoint, "archive2", "input"), ignore_flags=True, ignore_xattrs=True
-            )
-        with self.fuse_mount(self.repository_location, mountpoint, "-a", "archive"):
-            self.assert_dirs_equal(
-                self.input_path, os.path.join(mountpoint, "archive", "input"), ignore_flags=True, ignore_xattrs=True
-            )
-            # regular file
-            in_fn = "input/file1"
-            out_fn = os.path.join(mountpoint, "archive", "input", "file1")
-            # stat
-            sti1 = os.stat(in_fn)
-            sto1 = os.stat(out_fn)
-            assert sti1.st_mode == sto1.st_mode
-            assert sti1.st_uid == sto1.st_uid
-            assert sti1.st_gid == sto1.st_gid
-            assert sti1.st_size == sto1.st_size
-            if have_noatime:
-                assert same_ts_ns(sti1.st_atime * 1e9, sto1.st_atime * 1e9)
-            assert same_ts_ns(sti1.st_ctime * 1e9, sto1.st_ctime * 1e9)
-            assert same_ts_ns(sti1.st_mtime * 1e9, sto1.st_mtime * 1e9)
-            if are_hardlinks_supported():
-                # note: there is another hardlink to this, see below
-                assert sti1.st_nlink == sto1.st_nlink == 2
-            # read
-            with open(in_fn, "rb") as in_f, open(out_fn, "rb") as out_f:
-                assert in_f.read() == out_f.read()
-            # hardlink (to 'input/file1')
-            if are_hardlinks_supported():
-                in_fn = "input/hardlink"
-                out_fn = os.path.join(mountpoint, "archive", "input", "hardlink")
-                sti2 = os.stat(in_fn)
-                sto2 = os.stat(out_fn)
-                assert sti2.st_nlink == sto2.st_nlink == 2
-                assert sto1.st_ino == sto2.st_ino
-            # symlink
-            if are_symlinks_supported():
-                in_fn = "input/link1"
-                out_fn = os.path.join(mountpoint, "archive", "input", "link1")
-                sti = os.stat(in_fn, follow_symlinks=False)
-                sto = os.stat(out_fn, follow_symlinks=False)
-                assert sti.st_size == len("somewhere")
-                assert sto.st_size == len("somewhere")
-                assert stat.S_ISLNK(sti.st_mode)
-                assert stat.S_ISLNK(sto.st_mode)
-                assert os.readlink(in_fn) == os.readlink(out_fn)
-            # FIFO
-            if are_fifos_supported():
-                out_fn = os.path.join(mountpoint, "archive", "input", "fifo1")
-                sto = os.stat(out_fn)
-                assert stat.S_ISFIFO(sto.st_mode)
-            # list/read xattrs
-            try:
-                in_fn = "input/fusexattr"
-                out_fn = os.fsencode(os.path.join(mountpoint, "archive", "input", "fusexattr"))
-                if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path):
-                    assert sorted(no_selinux(xattr.listxattr(out_fn))) == [b"user.empty", b"user.foo"]
-                    assert xattr.getxattr(out_fn, b"user.foo") == b"bar"
-                    assert xattr.getxattr(out_fn, b"user.empty") == b""
-                else:
-                    assert no_selinux(xattr.listxattr(out_fn)) == []
-                    try:
-                        xattr.getxattr(out_fn, b"user.foo")
-                    except OSError as e:
-                        assert e.errno == llfuse.ENOATTR
-                    else:
-                        assert False, "expected OSError(ENOATTR), but no error was raised"
-            except OSError as err:
-                if sys.platform.startswith(("nothing_here_now",)) and err.errno == errno.ENOTSUP:
-                    # some systems have no xattr support on FUSE
-                    pass
+                assert no_selinux(xattr.listxattr(out_fn)) == []
+                try:
+                    xattr.getxattr(out_fn, b"user.foo")
+                except OSError as e:
+                    assert e.errno == llfuse.ENOATTR
                 else:
-                    raise
+                    assert False, "expected OSError(ENOATTR), but no error was raised"
+        except OSError as err:
+            if sys.platform.startswith(("nothing_here_now",)) and err.errno == errno.ENOTSUP:
+                # some systems have no xattr support on FUSE
+                pass
+            else:
+                raise
 
 
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_fuse_versions_view(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("test", contents=b"first")
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_fuse_versions_view(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "test", contents=b"first")
+    if are_hardlinks_supported():
+        create_regular_file(input_path, "hardlink1", contents=b"123456")
+        os.link("input/hardlink1", "input/hardlink2")
+        os.link("input/hardlink1", "input/hardlink3")
+    cmd(archiver, f"--repo={repo_location}", "create", "archive1", "input")
+    create_regular_file(input_path, "test", contents=b"second")
+    cmd(archiver, f"--repo={repo_location}", "create", "archive2", "input")
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    # mount the whole repository, archive contents shall show up in versioned view:
+    with fuse_mount(archiver, repo_location, mountpoint, "-o", "versions"):
+        path = os.path.join(mountpoint, "input", "test")  # filename shows up as directory ...
+        files = os.listdir(path)
+        assert all(f.startswith("test.") for f in files)  # ... with files test.xxxxx in there
+        assert {b"first", b"second"} == {open(os.path.join(path, f), "rb").read() for f in files}
+        if are_hardlinks_supported():
+            hl1 = os.path.join(mountpoint, "input", "hardlink1", "hardlink1.00001")
+            hl2 = os.path.join(mountpoint, "input", "hardlink2", "hardlink2.00001")
+            hl3 = os.path.join(mountpoint, "input", "hardlink3", "hardlink3.00001")
+            assert os.stat(hl1).st_ino == os.stat(hl2).st_ino == os.stat(hl3).st_ino
+            assert open(hl3, "rb").read() == b"123456"
+    # similar again, but exclude the 1st hardlink:
+    with fuse_mount(archiver, repo_location, mountpoint, "-o", "versions", "-e", "input/hardlink1"):
         if are_hardlinks_supported():
-            self.create_regular_file("hardlink1", contents=b"123456")
-            os.link("input/hardlink1", "input/hardlink2")
-            os.link("input/hardlink1", "input/hardlink3")
-        self.cmd(f"--repo={self.repository_location}", "create", "archive1", "input")
-        self.create_regular_file("test", contents=b"second")
-        self.cmd(f"--repo={self.repository_location}", "create", "archive2", "input")
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        # mount the whole repository, archive contents shall show up in versioned view:
-        with self.fuse_mount(self.repository_location, mountpoint, "-o", "versions"):
-            path = os.path.join(mountpoint, "input", "test")  # filename shows up as directory ...
-            files = os.listdir(path)
-            assert all(f.startswith("test.") for f in files)  # ... with files test.xxxxx in there
-            assert {b"first", b"second"} == {open(os.path.join(path, f), "rb").read() for f in files}
-            if are_hardlinks_supported():
-                hl1 = os.path.join(mountpoint, "input", "hardlink1", "hardlink1.00001")
-                hl2 = os.path.join(mountpoint, "input", "hardlink2", "hardlink2.00001")
-                hl3 = os.path.join(mountpoint, "input", "hardlink3", "hardlink3.00001")
-                assert os.stat(hl1).st_ino == os.stat(hl2).st_ino == os.stat(hl3).st_ino
-                assert open(hl3, "rb").read() == b"123456"
-        # similar again, but exclude the 1st hardlink:
-        with self.fuse_mount(self.repository_location, mountpoint, "-o", "versions", "-e", "input/hardlink1"):
-            if are_hardlinks_supported():
-                hl2 = os.path.join(mountpoint, "input", "hardlink2", "hardlink2.00001")
-                hl3 = os.path.join(mountpoint, "input", "hardlink3", "hardlink3.00001")
-                assert os.stat(hl2).st_ino == os.stat(hl3).st_ino
-                assert open(hl3, "rb").read() == b"123456"
+            hl2 = os.path.join(mountpoint, "input", "hardlink2", "hardlink2.00001")
+            hl3 = os.path.join(mountpoint, "input", "hardlink3", "hardlink3.00001")
+            assert os.stat(hl2).st_ino == os.stat(hl3).st_ino
+            assert open(hl3, "rb").read() == b"123456"
+
+
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_fuse_allow_damaged_files(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path = archiver.repository_location, archiver.repository_path
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "archive")
+    # Get rid of a chunk and repair it
+    archive, repository = open_archive(repo_path, "archive")
+    with repository:
+        for item in archive.iter_items():
+            if item.path.endswith(src_file):
+                repository.delete(item.chunks[-1].id)
+                path = item.path  # store full path for later
+                break
+        else:
+            assert False  # missed the file
+        repository.commit(compact=False)
+    cmd(archiver, f"--repo={repo_location}", "check", "--repair", exit_code=0)
+
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    with fuse_mount(archiver, repo_location, mountpoint, "-a", "archive"):
+        with pytest.raises(OSError) as excinfo:
+            open(os.path.join(mountpoint, "archive", path))
+        assert excinfo.value.errno == errno.EIO
+    with fuse_mount(archiver, repo_location, mountpoint, "-a", "archive", "-o", "allow_damaged_files"):
+        open(os.path.join(mountpoint, "archive", path)).close()
 
 
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_fuse_allow_damaged_files(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("archive")
-        # Get rid of a chunk and repair it
-        archive, repository = self.open_archive("archive")
-        with repository:
-            for item in archive.iter_items():
-                if item.path.endswith(src_file):
-                    repository.delete(item.chunks[-1].id)
-                    path = item.path  # store full path for later
-                    break
-            else:
-                assert False  # missed the file
-            repository.commit(compact=False)
-        self.cmd(f"--repo={self.repository_location}", "check", "--repair", exit_code=0)
 
 
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        with self.fuse_mount(self.repository_location, mountpoint, "-a", "archive"):
-            with pytest.raises(OSError) as excinfo:
-                open(os.path.join(mountpoint, "archive", path))
-            assert excinfo.value.errno == errno.EIO
-        with self.fuse_mount(self.repository_location, mountpoint, "-a", "archive", "-o", "allow_damaged_files"):
-            open(os.path.join(mountpoint, "archive", path)).close()
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_fuse_mount_options(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_src_archive(archiver, "arch11")
+    create_src_archive(archiver, "arch12")
+    create_src_archive(archiver, "arch21")
+    create_src_archive(archiver, "arch22")
 
 
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_fuse_mount_options(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_src_archive("arch11")
-        self.create_src_archive("arch12")
-        self.create_src_archive("arch21")
-        self.create_src_archive("arch22")
+    mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+    with fuse_mount(archiver, repo_location, mountpoint, "--first=2", "--sort=name"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12"]
+    with fuse_mount(archiver, repo_location, mountpoint, "--last=2", "--sort=name"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch21", "arch22"]
+    with fuse_mount(archiver, repo_location, mountpoint, "--match-archives=sh:arch1*"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12"]
+    with fuse_mount(archiver, repo_location, mountpoint, "--match-archives=sh:arch2*"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch21", "arch22"]
+    with fuse_mount(archiver, repo_location, mountpoint, "--match-archives=sh:arch*"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12", "arch21", "arch22"]
+    with fuse_mount(archiver, repo_location, mountpoint, "--match-archives=nope"):
+        assert sorted(os.listdir(os.path.join(mountpoint))) == []
 
 
-        mountpoint = os.path.join(self.tmpdir, "mountpoint")
-        with self.fuse_mount(self.repository_location, mountpoint, "--first=2", "--sort=name"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12"]
-        with self.fuse_mount(self.repository_location, mountpoint, "--last=2", "--sort=name"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch21", "arch22"]
-        with self.fuse_mount(self.repository_location, mountpoint, "--match-archives=sh:arch1*"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12"]
-        with self.fuse_mount(self.repository_location, mountpoint, "--match-archives=sh:arch2*"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch21", "arch22"]
-        with self.fuse_mount(self.repository_location, mountpoint, "--match-archives=sh:arch*"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == ["arch11", "arch12", "arch21", "arch22"]
-        with self.fuse_mount(self.repository_location, mountpoint, "--match-archives=nope"):
-            assert sorted(os.listdir(os.path.join(mountpoint))) == []
 
 
-    @unittest.skipUnless(llfuse, "llfuse not installed")
-    def test_migrate_lock_alive(self):
-        """Both old_id and new_id must not be stale during lock migration / daemonization."""
-        from functools import wraps
-        import pickle
-        import traceback
+@pytest.mark.skipif(not llfuse, reason="llfuse not installed")
+def test_migrate_lock_alive(archivers, request):
+    """Both old_id and new_id must not be stale during lock migration / daemonization."""
+    archiver = request.getfixturevalue(archivers)
+    if archiver.get_kind() == "remote":
+        pytest.skip("only works locally")
+    repo_location = archiver.repository_location
+    from functools import wraps
+    import pickle
+    import traceback
 
 
-        # Check results are communicated from the borg mount background process
-        # to the pytest process by means of a serialized dict object stored in this file.
-        assert_data_file = os.path.join(self.tmpdir, "migrate_lock_assert_data.pickle")
+    # Check results are communicated from the borg mount background process
+    # to the pytest process by means of a serialized dict object stored in this file.
+    assert_data_file = os.path.join(archiver.tmpdir, "migrate_lock_assert_data.pickle")
 
 
-        # Decorates Lock.migrate_lock() with process_alive() checks before and after.
-        # (We don't want to mix testing code into runtime.)
-        def write_assert_data(migrate_lock):
-            @wraps(migrate_lock)
-            def wrapper(self, old_id, new_id):
-                wrapper.num_calls += 1
-                assert_data = {
-                    "num_calls": wrapper.num_calls,
-                    "old_id": old_id,
-                    "new_id": new_id,
-                    "before": {
-                        "old_id_alive": platform.process_alive(*old_id),
-                        "new_id_alive": platform.process_alive(*new_id),
-                    },
-                    "exception": None,
-                    "exception.extr_tb": None,
-                    "after": {"old_id_alive": None, "new_id_alive": None},
-                }
+    # Decorates Lock.migrate_lock() with process_alive() checks before and after.
+    # (We don't want to mix testing code into runtime.)
+    def write_assert_data(migrate_lock):
+        @wraps(migrate_lock)
+        def wrapper(self, old_id, new_id):
+            wrapper.num_calls += 1
+            assert_data = {
+                "num_calls": wrapper.num_calls,
+                "old_id": old_id,
+                "new_id": new_id,
+                "before": {
+                    "old_id_alive": platform.process_alive(*old_id),
+                    "new_id_alive": platform.process_alive(*new_id),
+                },
+                "exception": None,
+                "exception.extr_tb": None,
+                "after": {"old_id_alive": None, "new_id_alive": None},
+            }
+            try:
+                with open(assert_data_file, "wb") as _out:
+                    pickle.dump(assert_data, _out)
+            except:
+                pass
+            try:
+                return migrate_lock(self, old_id, new_id)
+            except BaseException as e:
+                assert_data["exception"] = e
+                assert_data["exception.extr_tb"] = traceback.extract_tb(e.__traceback__)
+            finally:
+                assert_data["after"].update(
+                    {"old_id_alive": platform.process_alive(*old_id), "new_id_alive": platform.process_alive(*new_id)}
+                )
                 try:
                     with open(assert_data_file, "wb") as _out:
                         pickle.dump(assert_data, _out)
                 except:
                     pass
-                try:
-                    return migrate_lock(self, old_id, new_id)
-                except BaseException as e:
-                    assert_data["exception"] = e
-                    assert_data["exception.extr_tb"] = traceback.extract_tb(e.__traceback__)
-                finally:
-                    assert_data["after"].update(
-                        {
-                            "old_id_alive": platform.process_alive(*old_id),
-                            "new_id_alive": platform.process_alive(*new_id),
-                        }
-                    )
-                    try:
-                        with open(assert_data_file, "wb") as _out:
-                            pickle.dump(assert_data, _out)
-                    except:
-                        pass
 
 
-            wrapper.num_calls = 0
-            return wrapper
+        wrapper.num_calls = 0
+        return wrapper
 
 
-        # Decorate
-        Lock.migrate_lock = write_assert_data(Lock.migrate_lock)
-        try:
-            self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-            self.create_src_archive("arch")
-            mountpoint = os.path.join(self.tmpdir, "mountpoint")
-            # In order that the decoration is kept for the borg mount process, we must not spawn, but actually fork;
-            # not to be confused with the forking in borg.helpers.daemonize() which is done as well.
-            with self.fuse_mount(self.repository_location, mountpoint, os_fork=True):
-                pass
-            with open(assert_data_file, "rb") as _in:
-                assert_data = pickle.load(_in)
-            print(f"\nLock.migrate_lock(): assert_data = {assert_data!r}.", file=sys.stderr, flush=True)
-            exception = assert_data["exception"]
-            if exception is not None:
-                extracted_tb = assert_data["exception.extr_tb"]
-                print(
-                    "Lock.migrate_lock() raised an exception:\n",
-                    "Traceback (most recent call last):\n",
-                    *traceback.format_list(extracted_tb),
-                    *traceback.format_exception(exception.__class__, exception, None),
-                    sep="",
-                    end="",
-                    file=sys.stderr,
-                    flush=True,
-                )
-
-            assert assert_data["num_calls"] == 1, "Lock.migrate_lock() must be called exactly once."
-            assert exception is None, "Lock.migrate_lock() may not raise an exception."
-
-            assert_data_before = assert_data["before"]
-            assert assert_data_before[
-                "old_id_alive"
-            ], "old_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
-            assert assert_data_before[
-                "new_id_alive"
-            ], "new_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
-
-            assert_data_after = assert_data["after"]
-            assert assert_data_after[
-                "old_id_alive"
-            ], "old_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
-            assert assert_data_after[
-                "new_id_alive"
-            ], "new_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
-        finally:
-            # Undecorate
-            Lock.migrate_lock = Lock.migrate_lock.__wrapped__
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
+    # Decorate
+    Lock.migrate_lock = write_assert_data(Lock.migrate_lock)
+    try:
+        cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+        create_src_archive(archiver, "arch")
+        mountpoint = os.path.join(archiver.tmpdir, "mountpoint")
+        # In order that the decoration is kept for the borg mount process, we must not spawn, but actually fork;
+        # not to be confused with the forking in borg.helpers.daemonize() which is done as well.
+        with fuse_mount(archiver, repo_location, mountpoint, os_fork=True):
+            pass
+        with open(assert_data_file, "rb") as _in:
+            assert_data = pickle.load(_in)
+        print(f"\nLock.migrate_lock(): assert_data = {assert_data!r}.", file=sys.stderr, flush=True)
+        exception = assert_data["exception"]
+        if exception is not None:
+            extracted_tb = assert_data["exception.extr_tb"]
+            print(
+                "Lock.migrate_lock() raised an exception:\n",
+                "Traceback (most recent call last):\n",
+                *traceback.format_list(extracted_tb),
+                *traceback.format_exception(exception.__class__, exception, None),
+                sep="",
+                end="",
+                file=sys.stderr,
+                flush=True,
+            )
 
 
-    @unittest.skip("only works locally")
-    def test_migrate_lock_alive(self):
-        pass
+        assert assert_data["num_calls"] == 1, "Lock.migrate_lock() must be called exactly once."
+        assert exception is None, "Lock.migrate_lock() may not raise an exception."
 
 
+        assert_data_before = assert_data["before"]
+        assert assert_data_before[
+            "old_id_alive"
+        ], "old_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
+        assert assert_data_before[
+            "new_id_alive"
+        ], "new_id must be alive (=must not be stale) when calling Lock.migrate_lock()."
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    def test_fuse(self):
-        if fakeroot_detected():
-            unittest.skip("test_fuse with the binary is not compatible with fakeroot")
-        else:
-            super().test_fuse()
+        assert_data_after = assert_data["after"]
+        assert assert_data_after[
+            "old_id_alive"
+        ], "old_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
+        assert assert_data_after[
+            "new_id_alive"
+        ], "new_id must be alive (=must not be stale) when Lock.migrate_lock() has returned."
+    finally:
+        # Undecorate
+        Lock.migrate_lock = Lock.migrate_lock.__wrapped__
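
Editor's note on the conversion pattern used throughout this PR: the former TestCase subclasses (local / remote / binary) become module-level functions that accept an "archivers" parameter plus pytest's request fixture and resolve the concrete archiver lazily via request.getfixturevalue(). The parametrization is installed per module through a pytest_generate_tests hook. A minimal sketch of what generate_archiver_tests plausibly does: the fixture names come from the explicit parametrize list in rdelete_cmd.py below, while the helper body itself is an assumption:

def generate_archiver_tests(metafunc, kinds: str):
    # parametrize every test that declares an "archivers" argument with the
    # fixture names for the requested repository kinds
    if "archivers" in metafunc.fixturenames:
        fixtures = ["archiver" if kind == "local" else f"{kind}_archiver" for kind in kinds.split(",")]
        metafunc.parametrize("archivers", fixtures)

Also worth noting in test_migrate_lock_alive above: functools.wraps records the original function as wrapper.__wrapped__, which is exactly what the finally block relies on to undecorate Lock.migrate_lock again.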

+ 19 - 18
src/borg/testsuite/archiver/patterns.py

@@ -4,23 +4,24 @@ from ...patterns import IECommand, PatternMatcher, parse_pattern
 from ...item import Item


-class TestBuildFilter:
-    def test_basic(self):
-        matcher = PatternMatcher()
-        matcher.add([parse_pattern("included")], IECommand.Include)
-        filter = build_filter(matcher, 0)
-        assert filter(Item(path="included"))
-        assert filter(Item(path="included/file"))
-        assert not filter(Item(path="something else"))
+def test_basic():
+    matcher = PatternMatcher()
+    matcher.add([parse_pattern("included")], IECommand.Include)
+    filter = build_filter(matcher, 0)
+    assert filter(Item(path="included"))
+    assert filter(Item(path="included/file"))
+    assert not filter(Item(path="something else"))
 
 
-    def test_empty(self):
-        matcher = PatternMatcher(fallback=True)
-        filter = build_filter(matcher, 0)
-        assert filter(Item(path="anything"))
 
 
-    def test_strip_components(self):
-        matcher = PatternMatcher(fallback=True)
-        filter = build_filter(matcher, strip_components=1)
-        assert not filter(Item(path="shallow"))
-        assert filter(Item(path="deep enough/file"))
-        assert filter(Item(path="something/dir/file"))
+def test_empty():
+    matcher = PatternMatcher(fallback=True)
+    filter = build_filter(matcher, 0)
+    assert filter(Item(path="anything"))
+
+
+def test_strip_components():
+    matcher = PatternMatcher(fallback=True)
+    filter = build_filter(matcher, strip_components=1)
+    assert not filter(Item(path="shallow"))
+    assert filter(Item(path="deep enough/file"))
+    assert filter(Item(path="something/dir/file"))

+ 213 - 212
src/borg/testsuite/archiver/prune_cmd.py

@@ -1,227 +1,228 @@
 import re
-import unittest
 from datetime import datetime

 from ...constants import *  # NOQA
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RK_ENCRYPTION,
-    src_dir,
-    BORG_EXES,
-)
+from . import cmd, RK_ENCRYPTION, src_dir, generate_archiver_tests
 
 
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_prune_repository(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", src_dir)
-        # these are not really a checkpoints, but they look like some:
-        self.cmd(f"--repo={self.repository_location}", "create", "test3.checkpoint", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "test3.checkpoint.1", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "test4.checkpoint", src_dir)
-        output = self.cmd(f"--repo={self.repository_location}", "prune", "--list", "--dry-run", "--keep-daily=1")
-        assert re.search(r"Would prune:\s+test1", output)
-        # must keep the latest non-checkpoint archive:
-        assert re.search(r"Keeping archive \(rule: daily #1\):\s+test2", output)
-        # must keep the latest checkpoint archive:
-        assert re.search(r"Keeping checkpoint archive:\s+test4.checkpoint", output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--consider-checkpoints")
-        self.assert_in("test1", output)
-        self.assert_in("test2", output)
-        self.assert_in("test3.checkpoint", output)
-        self.assert_in("test3.checkpoint.1", output)
-        self.assert_in("test4.checkpoint", output)
-        self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=1")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--consider-checkpoints")
-        self.assert_not_in("test1", output)
-        # the latest non-checkpoint archive must be still there:
-        self.assert_in("test2", output)
-        # only the latest checkpoint archive must still be there:
-        self.assert_not_in("test3.checkpoint", output)
-        self.assert_not_in("test3.checkpoint.1", output)
-        self.assert_in("test4.checkpoint", output)
-        # now we supersede the latest checkpoint by a successful backup:
-        self.cmd(f"--repo={self.repository_location}", "create", "test5", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=2")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--consider-checkpoints")
-        # all checkpoints should be gone now:
-        self.assert_not_in("checkpoint", output)
-        # the latest archive must be still there
-        self.assert_in("test5", output)
 
 
-    def _create_archive_ts(self, name, y, m, d, H=0, M=0, S=0):
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "create",
-            "--timestamp",
-            datetime(y, m, d, H, M, S, 0).strftime(ISO_FORMAT_NO_USECS),  # naive == local time / local tz
-            name,
-            src_dir,
-        )
+def _create_archive_ts(archiver, name, y, m, d, H=0, M=0, S=0):
+    cmd(
+        archiver,
+        f"--repo={archiver.repository_location}",
+        "create",
+        "--timestamp",
+        datetime(y, m, d, H, M, S, 0).strftime(ISO_FORMAT_NO_USECS),  # naive == local time / local tz
+        name,
+        src_dir,
+    )
 
 
-    # This test must match docs/misc/prune-example.txt
-    def test_prune_repository_example(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # Archives that will be kept, per the example
-        # Oldest archive
-        self._create_archive_ts("test01", 2015, 1, 1)
-        # 6 monthly archives
-        self._create_archive_ts("test02", 2015, 6, 30)
-        self._create_archive_ts("test03", 2015, 7, 31)
-        self._create_archive_ts("test04", 2015, 8, 31)
-        self._create_archive_ts("test05", 2015, 9, 30)
-        self._create_archive_ts("test06", 2015, 10, 31)
-        self._create_archive_ts("test07", 2015, 11, 30)
-        # 14 daily archives
-        self._create_archive_ts("test08", 2015, 12, 17)
-        self._create_archive_ts("test09", 2015, 12, 18)
-        self._create_archive_ts("test10", 2015, 12, 20)
-        self._create_archive_ts("test11", 2015, 12, 21)
-        self._create_archive_ts("test12", 2015, 12, 22)
-        self._create_archive_ts("test13", 2015, 12, 23)
-        self._create_archive_ts("test14", 2015, 12, 24)
-        self._create_archive_ts("test15", 2015, 12, 25)
-        self._create_archive_ts("test16", 2015, 12, 26)
-        self._create_archive_ts("test17", 2015, 12, 27)
-        self._create_archive_ts("test18", 2015, 12, 28)
-        self._create_archive_ts("test19", 2015, 12, 29)
-        self._create_archive_ts("test20", 2015, 12, 30)
-        self._create_archive_ts("test21", 2015, 12, 31)
-        # Additional archives that would be pruned
-        # The second backup of the year
-        self._create_archive_ts("test22", 2015, 1, 2)
-        # The next older monthly backup
-        self._create_archive_ts("test23", 2015, 5, 31)
-        # The next older daily backup
-        self._create_archive_ts("test24", 2015, 12, 16)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "prune",
-            "--list",
-            "--dry-run",
-            "--keep-daily=14",
-            "--keep-monthly=6",
-            "--keep-yearly=1",
-        )
-        # Prune second backup of the year
-        assert re.search(r"Would prune:\s+test22", output)
-        # Prune next older monthly and daily backups
-        assert re.search(r"Would prune:\s+test23", output)
-        assert re.search(r"Would prune:\s+test24", output)
-        # Must keep the other 21 backups
-        # Yearly is kept as oldest archive
-        assert re.search(r"Keeping archive \(rule: yearly\[oldest\] #1\):\s+test01", output)
-        for i in range(1, 7):
-            assert re.search(r"Keeping archive \(rule: monthly #" + str(i) + r"\):\s+test" + ("%02d" % (8 - i)), output)
-        for i in range(1, 15):
-            assert re.search(r"Keeping archive \(rule: daily #" + str(i) + r"\):\s+test" + ("%02d" % (22 - i)), output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        # Nothing pruned after dry run
-        for i in range(1, 25):
-            self.assert_in("test%02d" % i, output)
-        self.cmd(
-            f"--repo={self.repository_location}", "prune", "--keep-daily=14", "--keep-monthly=6", "--keep-yearly=1"
-        )
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        # All matching backups plus oldest kept
-        for i in range(1, 22):
-            self.assert_in("test%02d" % i, output)
-        # Other backups have been pruned
-        for i in range(22, 25):
-            self.assert_not_in("test%02d" % i, output)
 
 
-    # With an initial and daily backup, prune daily until oldest is replaced by a monthly backup
-    def test_prune_retain_and_expire_oldest(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # Initial backup
-        self._create_archive_ts("original_archive", 2020, 9, 1, 11, 15)
-        # Archive and prune daily for 30 days
-        for i in range(1, 31):
-            self._create_archive_ts("september%02d" % i, 2020, 9, i, 12)
-            self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
-        # Archive and prune 6 days into the next month
-        for i in range(1, 7):
-            self._create_archive_ts("october%02d" % i, 2020, 10, i, 12)
-            self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
-        # Oldest backup is still retained
-        output = self.cmd(
-            f"--repo={self.repository_location}", "prune", "--list", "--dry-run", "--keep-daily=7", "--keep-monthly=1"
-        )
-        assert re.search(r"Keeping archive \(rule: monthly\[oldest\] #1" + r"\):\s+original_archive", output)
-        # Archive one more day and prune.
-        self._create_archive_ts("october07", 2020, 10, 7, 12)
-        self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
-        # Last day of previous month is retained as monthly, and oldest is expired.
-        output = self.cmd(
-            f"--repo={self.repository_location}", "prune", "--list", "--dry-run", "--keep-daily=7", "--keep-monthly=1"
-        )
-        assert re.search(r"Keeping archive \(rule: monthly #1\):\s+september30", output)
-        self.assert_not_in("original_archive", output)
+def test_prune_repository(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", src_dir)
+    # these are not really checkpoints, but they look like some:
+    cmd(archiver, f"--repo={repo_location}", "create", "test3.checkpoint", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "test3.checkpoint.1", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "test4.checkpoint", src_dir)
+    output = cmd(archiver, f"--repo={repo_location}", "prune", "--list", "--dry-run", "--keep-daily=1")
+    assert re.search(r"Would prune:\s+test1", output)
+    # must keep the latest non-checkpoint archive:
+    assert re.search(r"Keeping archive \(rule: daily #1\):\s+test2", output)
+    # must keep the latest checkpoint archive:
+    assert re.search(r"Keeping checkpoint archive:\s+test4.checkpoint", output)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--consider-checkpoints")
+    assert "test1" in output
+    assert "test2" in output
+    assert "test3.checkpoint" in output
+    assert "test3.checkpoint.1" in output
+    assert "test4.checkpoint" in output
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=1")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--consider-checkpoints")
+    assert "test1" not in output
+    # the latest non-checkpoint archive must be still there:
+    assert "test2" in output
+    # only the latest checkpoint archive must still be there:
+    assert "test3.checkpoint" not in output
+    assert "test3.checkpoint.1" not in output
+    assert "test4.checkpoint" in output
+    # now we supersede the latest checkpoint by a successful backup:
+    cmd(archiver, f"--repo={repo_location}", "create", "test5", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=2")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--consider-checkpoints")
+    # all checkpoints should be gone now:
+    assert "checkpoint" not in output
+    # the latest archive must be still there
+    assert "test5" in output
 
 
-    def test_prune_repository_prefix(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "foo-2015-08-12-10:00", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "foo-2015-08-12-20:00", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "bar-2015-08-12-10:00", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "bar-2015-08-12-20:00", src_dir)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "prune",
-            "--list",
-            "--dry-run",
-            "--keep-daily=1",
-            "--match-archives=sh:foo-*",
-        )
-        assert re.search(r"Keeping archive \(rule: daily #1\):\s+foo-2015-08-12-20:00", output)
-        assert re.search(r"Would prune:\s+foo-2015-08-12-10:00", output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_in("foo-2015-08-12-10:00", output)
-        self.assert_in("foo-2015-08-12-20:00", output)
-        self.assert_in("bar-2015-08-12-10:00", output)
-        self.assert_in("bar-2015-08-12-20:00", output)
-        self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=1", "--match-archives=sh:foo-*")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_not_in("foo-2015-08-12-10:00", output)
-        self.assert_in("foo-2015-08-12-20:00", output)
-        self.assert_in("bar-2015-08-12-10:00", output)
-        self.assert_in("bar-2015-08-12-20:00", output)
 
 
-    def test_prune_repository_glob(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "2015-08-12-10:00-foo", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "2015-08-12-20:00-foo", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "2015-08-12-10:00-bar", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "2015-08-12-20:00-bar", src_dir)
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "prune",
-            "--list",
-            "--dry-run",
-            "--keep-daily=1",
-            "--match-archives=sh:2015-*-foo",
-        )
-        assert re.search(r"Keeping archive \(rule: daily #1\):\s+2015-08-12-20:00-foo", output)
-        assert re.search(r"Would prune:\s+2015-08-12-10:00-foo", output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_in("2015-08-12-10:00-foo", output)
-        self.assert_in("2015-08-12-20:00-foo", output)
-        self.assert_in("2015-08-12-10:00-bar", output)
-        self.assert_in("2015-08-12-20:00-bar", output)
-        self.cmd(f"--repo={self.repository_location}", "prune", "--keep-daily=1", "--match-archives=sh:2015-*-foo")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.assert_not_in("2015-08-12-10:00-foo", output)
-        self.assert_in("2015-08-12-20:00-foo", output)
-        self.assert_in("2015-08-12-10:00-bar", output)
-        self.assert_in("2015-08-12-20:00-bar", output)
+# This test must match docs/misc/prune-example.txt
+def test_prune_repository_example(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # Archives that will be kept, per the example
+    # Oldest archive
+    _create_archive_ts(archiver, "test01", 2015, 1, 1)
+    # 6 monthly archives
+    _create_archive_ts(archiver, "test02", 2015, 6, 30)
+    _create_archive_ts(archiver, "test03", 2015, 7, 31)
+    _create_archive_ts(archiver, "test04", 2015, 8, 31)
+    _create_archive_ts(archiver, "test05", 2015, 9, 30)
+    _create_archive_ts(archiver, "test06", 2015, 10, 31)
+    _create_archive_ts(archiver, "test07", 2015, 11, 30)
+    # 14 daily archives
+    _create_archive_ts(archiver, "test08", 2015, 12, 17)
+    _create_archive_ts(archiver, "test09", 2015, 12, 18)
+    _create_archive_ts(archiver, "test10", 2015, 12, 20)
+    _create_archive_ts(archiver, "test11", 2015, 12, 21)
+    _create_archive_ts(archiver, "test12", 2015, 12, 22)
+    _create_archive_ts(archiver, "test13", 2015, 12, 23)
+    _create_archive_ts(archiver, "test14", 2015, 12, 24)
+    _create_archive_ts(archiver, "test15", 2015, 12, 25)
+    _create_archive_ts(archiver, "test16", 2015, 12, 26)
+    _create_archive_ts(archiver, "test17", 2015, 12, 27)
+    _create_archive_ts(archiver, "test18", 2015, 12, 28)
+    _create_archive_ts(archiver, "test19", 2015, 12, 29)
+    _create_archive_ts(archiver, "test20", 2015, 12, 30)
+    _create_archive_ts(archiver, "test21", 2015, 12, 31)
+    # Additional archives that would be pruned
+    # The second backup of the year
+    _create_archive_ts(archiver, "test22", 2015, 1, 2)
+    # The next older monthly backup
+    _create_archive_ts(archiver, "test23", 2015, 5, 31)
+    # The next older daily backup
+    _create_archive_ts(archiver, "test24", 2015, 12, 16)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "prune",
+        "--list",
+        "--dry-run",
+        "--keep-daily=14",
+        "--keep-monthly=6",
+        "--keep-yearly=1",
+    )
+    # Prune second backup of the year
+    assert re.search(r"Would prune:\s+test22", output)
+    # Prune next older monthly and daily backups
+    assert re.search(r"Would prune:\s+test23", output)
+    assert re.search(r"Would prune:\s+test24", output)
+    # Must keep the other 21 backups
+    # Yearly is kept as oldest archive
+    assert re.search(r"Keeping archive \(rule: yearly\[oldest\] #1\):\s+test01", output)
+    for i in range(1, 7):
+        assert re.search(r"Keeping archive \(rule: monthly #" + str(i) + r"\):\s+test" + ("%02d" % (8 - i)), output)
+    for i in range(1, 15):
+        assert re.search(r"Keeping archive \(rule: daily #" + str(i) + r"\):\s+test" + ("%02d" % (22 - i)), output)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    # Nothing pruned after dry run
+    for i in range(1, 25):
+        assert "test%02d" % i in output
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=14", "--keep-monthly=6", "--keep-yearly=1")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    # All matching backups plus oldest kept
+    for i in range(1, 22):
+        assert "test%02d" % i in output
+    # Other backups have been pruned
+    for i in range(22, 25):
+        assert "test%02d" % i not in output
 
 
 
 
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
+# With an initial and daily backup, prune daily until oldest is replaced by a monthly backup
+def test_prune_retain_and_expire_oldest(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # Initial backup
+    _create_archive_ts(archiver, "original_archive", 2020, 9, 1, 11, 15)
+    # Archive and prune daily for 30 days
+    for i in range(1, 31):
+        _create_archive_ts(archiver, "september%02d" % i, 2020, 9, i, 12)
+        cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
+    # Archive and prune 6 days into the next month
+    for i in range(1, 7):
+        _create_archive_ts(archiver, "october%02d" % i, 2020, 10, i, 12)
+        cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
+    # Oldest backup is still retained
+    output = cmd(
+        archiver, f"--repo={repo_location}", "prune", "--list", "--dry-run", "--keep-daily=7", "--keep-monthly=1"
+    )
+    assert re.search(r"Keeping archive \(rule: monthly\[oldest\] #1" + r"\):\s+original_archive", output)
+    # Archive one more day and prune.
+    _create_archive_ts(archiver, "october07", 2020, 10, 7, 12)
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=7", "--keep-monthly=1")
+    # Last day of previous month is retained as monthly, and oldest is expired.
+    output = cmd(
+        archiver, f"--repo={repo_location}", "prune", "--list", "--dry-run", "--keep-daily=7", "--keep-monthly=1"
+    )
+    assert re.search(r"Keeping archive \(rule: monthly #1\):\s+september30", output)
+    assert "original_archive" not in output
 
 
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+def test_prune_repository_prefix(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "foo-2015-08-12-10:00", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "foo-2015-08-12-20:00", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "bar-2015-08-12-10:00", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "bar-2015-08-12-20:00", src_dir)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "prune",
+        "--list",
+        "--dry-run",
+        "--keep-daily=1",
+        "--match-archives=sh:foo-*",
+    )
+    assert re.search(r"Keeping archive \(rule: daily #1\):\s+foo-2015-08-12-20:00", output)
+    assert re.search(r"Would prune:\s+foo-2015-08-12-10:00", output)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "foo-2015-08-12-10:00" in output
+    assert "foo-2015-08-12-20:00" in output
+    assert "bar-2015-08-12-10:00" in output
+    assert "bar-2015-08-12-20:00" in output
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=1", "--match-archives=sh:foo-*")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "foo-2015-08-12-10:00" not in output
+    assert "foo-2015-08-12-20:00" in output
+    assert "bar-2015-08-12-10:00" in output
+    assert "bar-2015-08-12-20:00" in output
+
+
+def test_prune_repository_glob(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "2015-08-12-10:00-foo", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "2015-08-12-20:00-foo", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "2015-08-12-10:00-bar", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "2015-08-12-20:00-bar", src_dir)
+    output = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "prune",
+        "--list",
+        "--dry-run",
+        "--keep-daily=1",
+        "--match-archives=sh:2015-*-foo",
+    )
+    assert re.search(r"Keeping archive \(rule: daily #1\):\s+2015-08-12-20:00-foo", output)
+    assert re.search(r"Would prune:\s+2015-08-12-10:00-foo", output)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "2015-08-12-10:00-foo" in output
+    assert "2015-08-12-20:00-foo" in output
+    assert "2015-08-12-10:00-bar" in output
+    assert "2015-08-12-20:00-bar" in output
+    cmd(archiver, f"--repo={repo_location}", "prune", "--keep-daily=1", "--match-archives=sh:2015-*-foo")
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "2015-08-12-10:00-foo" not in output
+    assert "2015-08-12-20:00-foo" in output
+    assert "2015-08-12-10:00-bar" in output
+    assert "2015-08-12-20:00-bar" in output

+ 65 - 64
src/borg/testsuite/archiver/rcompress_cmd.py

@@ -6,78 +6,79 @@ from ...repository import Repository
 from ...manifest import Manifest
 from ...compress import ZSTD, ZLIB, LZ4, CNONE

-from . import ArchiverTestCaseBase, RK_ENCRYPTION
+from . import create_regular_file, cmd, RK_ENCRYPTION


-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_rcompress(self):
-        def check_compression(ctype, clevel, olevel):
-            """check if all the chunks in the repo are compressed/obfuscated like expected"""
-            repository = Repository(self.repository_path, exclusive=True)
-            with repository:
-                manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-                state = None
-                while True:
-                    ids, state = repository.scan(limit=LIST_SCAN_LIMIT, state=state)
-                    if not ids:
-                        break
-                    for id in ids:
-                        chunk = repository.get(id, read_data=True)
-                        meta, data = manifest.repo_objs.parse(id, chunk)  # will also decompress according to metadata
-                        m_olevel = meta.get("olevel", -1)
-                        m_psize = meta.get("psize", -1)
-                        print(
-                            hexlify(id).decode(),
-                            meta["ctype"],
-                            meta["clevel"],
-                            meta["csize"],
-                            meta["size"],
-                            m_olevel,
-                            m_psize,
-                        )
-                        # this is not as easy as one thinks due to the DecidingCompressor choosing the smallest of
-                        # (desired compressed, lz4 compressed, not compressed).
-                        assert meta["ctype"] in (ctype, LZ4.ID, CNONE.ID)
-                        assert meta["clevel"] in (clevel, 255)  # LZ4 and CNONE has level 255
-                        if olevel != -1:  # we expect obfuscation
-                            assert "psize" in meta
-                            assert m_olevel == olevel
-                        else:
-                            assert "psize" not in meta
-                            assert "olevel" not in meta
+def test_rcompress(archiver):
+    repo_location, input_path = archiver.repository_location, archiver.input_path
 
 
-        self.create_regular_file("file1", size=1024 * 10)
-        self.create_regular_file("file2", contents=os.urandom(1024 * 10))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
+    def check_compression(ctype, clevel, olevel):
+        """check if all the chunks in the repo are compressed/obfuscated like expected"""
+        repository = Repository(archiver.repository_path, exclusive=True)
+        with repository:
+            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            state = None
+            while True:
+                ids, state = repository.scan(limit=LIST_SCAN_LIMIT, state=state)
+                if not ids:
+                    break
+                for id in ids:
+                    chunk = repository.get(id, read_data=True)
+                    meta, data = manifest.repo_objs.parse(id, chunk)  # will also decompress according to metadata
+                    m_olevel = meta.get("olevel", -1)
+                    m_psize = meta.get("psize", -1)
+                    print(
+                        hexlify(id).decode(),
+                        meta["ctype"],
+                        meta["clevel"],
+                        meta["csize"],
+                        meta["size"],
+                        m_olevel,
+                        m_psize,
+                    )
+                    # this is not as easy as one might think: the DecidingCompressor chooses the smallest of
+                    # (desired compression, lz4 compression, no compression).
+                    assert meta["ctype"] in (ctype, LZ4.ID, CNONE.ID)
+                    assert meta["clevel"] in (clevel, 255)  # LZ4 and CNONE have level 255
+                    if olevel != -1:  # we expect obfuscation
+                        assert "psize" in meta
+                        assert m_olevel == olevel
+                    else:
+                        assert "psize" not in meta
+                        assert "olevel" not in meta
 
 
-        cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 3, -1
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "-C", f"{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    create_regular_file(input_path, "file1", size=1024 * 10)
+    create_regular_file(input_path, "file2", contents=os.urandom(1024 * 10))
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
 
 
-        cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 1, -1  # change compressor (and level)
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 3, -1
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "-C", f"{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, -1  # only change level
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 1, -1  # change compressor (and level)
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, 110  # only change to obfuscated
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"obfuscate,{olevel},{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, -1  # only change level
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, 112  # only change obfuscation level
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"obfuscate,{olevel},{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, 110  # only change to obfuscated
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"obfuscate,{olevel},{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, -1  # change to not obfuscated
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, 112  # only change obfuscation level
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"obfuscate,{olevel},{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 1, -1
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"auto,{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZSTD.name, ZSTD.ID, 3, -1  # change to not obfuscated
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
 
 
-        cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 2, 111
-        self.cmd(f"--repo={self.repository_location}", "rcompress", "-C", f"obfuscate,{olevel},auto,{cname},{clevel}")
-        check_compression(ctype, clevel, olevel)
+    cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 1, -1
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"auto,{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
+
+    cname, ctype, clevel, olevel = ZLIB.name, ZLIB.ID, 2, 111
+    cmd(archiver, f"--repo={repo_location}", "rcompress", "-C", f"obfuscate,{olevel},auto,{cname},{clevel}")
+    check_compression(ctype, clevel, olevel)
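
The loosened assertions in check_compression follow from the deciding behavior noted in the comment: per chunk, the smallest of the requested result, an LZ4 result and the raw data is kept. A toy sketch of that decision (illustrative only, not borg's code):

def decide(requested: bytes, lz4_compressed: bytes, raw: bytes) -> str:
    # smallest representation wins; if the raw data wins, the chunk is stored
    # uncompressed (ctype CNONE) and its clevel is reported as 255
    sizes = {"requested": len(requested), "lz4": len(lz4_compressed), "none": len(raw)}
    return min(sizes, key=sizes.get)

Hence meta["ctype"] may legitimately be LZ4.ID or CNONE.ID even when e.g. zstd was requested.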

+ 71 - 76
src/borg/testsuite/archiver/rcreate_cmd.py

@@ -1,5 +1,4 @@
 import os
-import unittest
 from unittest.mock import patch

 import pytest
@@ -9,79 +8,75 @@ from ...constants import *  # NOQA
 from ...crypto.key import FlexiKey
 from ...repository import Repository
 from .. import environment_variable
-from . import (
-    ArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RemoteArchiverTestCaseBase,
-    RK_ENCRYPTION,
-    KF_ENCRYPTION,
-    BORG_EXES,
-)
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_rcreate_parent_dirs(self):
-        parent_path = os.path.join(self.tmpdir, "parent1", "parent2")
-        repository_path = os.path.join(parent_path, "repository")
-        repository_location = self.prefix + repository_path
-        with pytest.raises(Repository.ParentPathDoesNotExist):
-            # normal borg rcreate does NOT create missing parent dirs
-            self.cmd(f"--repo={repository_location}", "rcreate", "--encryption=none")
-        # but if told so, it does:
-        self.cmd(f"--repo={repository_location}", "rcreate", "--encryption=none", "--make-parent-dirs")
-        assert os.path.exists(parent_path)
-
-    def test_rcreate_interrupt(self):
-        def raise_eof(*args, **kwargs):
-            raise EOFError
-
-        with patch.object(FlexiKey, "create", raise_eof):
-            self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION, exit_code=1)
-        assert not os.path.exists(self.repository_location)
-
-    def test_rcreate_requires_encryption_option(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", exit_code=2)
-
-    def test_rcreate_nested_repositories(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        if self.FORK_DEFAULT:
-            self.cmd(f"--repo={self.repository_location}/nested", "rcreate", RK_ENCRYPTION, exit_code=2)
+from . import cmd, generate_archiver_tests, RK_ENCRYPTION, KF_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_rcreate_parent_dirs(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.EXE:
+        pytest.skip("does not raise Exception, but sets rc==2")
+    remote_repo = archiver.get_kind() == "remote"
+    parent_path = os.path.join(archiver.tmpdir, "parent1", "parent2")
+    repository_path = os.path.join(parent_path, "repository")
+    repository_location = ("ssh://__testsuite__" + repository_path) if remote_repo else repository_path
+    with pytest.raises(Repository.ParentPathDoesNotExist):
+        # normal borg rcreate does NOT create missing parent dirs
+        cmd(archiver, f"--repo={repository_location}", "rcreate", "--encryption=none")
+    # but if told so, it does:
+    cmd(archiver, f"--repo={repository_location}", "rcreate", "--encryption=none", "--make-parent-dirs")
+    assert os.path.exists(parent_path)
+
+
+def test_rcreate_interrupt(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    if archiver.EXE:
+        pytest.skip("patches object")
+
+    def raise_eof(*args, **kwargs):
+        raise EOFError
+
+    with patch.object(FlexiKey, "create", raise_eof):
+        cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION, exit_code=1)
+    assert not os.path.exists(repo_location)
+
+
+def test_rcreate_requires_encryption_option(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    cmd(archiver, f"--repo={archiver.repository_location}", "rcreate", exit_code=2)
+
+
+def test_rcreate_nested_repositories(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    if archiver.FORK_DEFAULT:
+        cmd(archiver, f"--repo={repo_location}/nested", "rcreate", RK_ENCRYPTION, exit_code=2)
+    else:
+        with pytest.raises(Repository.AlreadyExists):
+            cmd(archiver, f"--repo={repo_location}/nested", "rcreate", RK_ENCRYPTION)
+
+
+def test_rcreate_refuse_to_overwrite_keyfile(archivers, request):
+    #  BORG_KEY_FILE=something borg rcreate should quit if "something" already exists.
+    #  See: https://github.com/borgbackup/borg/pull/6046
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    keyfile = os.path.join(archiver.tmpdir, "keyfile")
+    with environment_variable(BORG_KEY_FILE=keyfile):
+        cmd(archiver, f"--repo={repo_location}0", "rcreate", KF_ENCRYPTION)
+        with open(keyfile) as file:
+            before = file.read()
+        arg = (f"--repo={repo_location}1", "rcreate", KF_ENCRYPTION)
+        if archiver.FORK_DEFAULT:
+            cmd(archiver, *arg, exit_code=2)
         else:
         else:
-            with pytest.raises(Repository.AlreadyExists):
-                self.cmd(f"--repo={self.repository_location}/nested", "rcreate", RK_ENCRYPTION)
-
-    def test_rcreate_refuse_to_overwrite_keyfile(self):
-        """BORG_KEY_FILE=something borg rcreate should quit if "something" already exists.
-
-        See https://github.com/borgbackup/borg/pull/6046"""
-        keyfile = os.path.join(self.tmpdir, "keyfile")
-        with environment_variable(BORG_KEY_FILE=keyfile):
-            self.cmd(f"--repo={self.repository_location}0", "rcreate", KF_ENCRYPTION)
-            with open(keyfile) as file:
-                before = file.read()
-            arg = (f"--repo={self.repository_location}1", "rcreate", KF_ENCRYPTION)
-            if self.FORK_DEFAULT:
-                self.cmd(*arg, exit_code=2)
-            else:
-                with pytest.raises(Error):
-                    self.cmd(*arg)
-            with open(keyfile) as file:
-                after = file.read()
-            assert before == after
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
-
-    @unittest.skip("does not raise Exception, but sets rc==2")
-    def test_rcreate_parent_dirs(self):
-        pass
-
-    @unittest.skip("patches objects")
-    def test_rcreate_interrupt(self):
-        pass
+            with pytest.raises(Error):
+                cmd(archiver, *arg)
+        with open(keyfile) as file:
+            after = file.read()
+        assert before == after
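
test_rcreate_nested_repositories and test_rcreate_refuse_to_overwrite_keyfile share a dual error-checking idiom: with FORK_DEFAULT, borg runs as a child process and failures are only visible as exit codes, while in-process runs let the exception propagate. If the pattern keeps spreading, it could be factored into a helper along these lines (hypothetical, not part of this PR):

def expect_failure(archiver, *args, exit_code=2, exception=Error):
    if archiver.FORK_DEFAULT:
        # child process: only the return code is observable
        cmd(archiver, *args, exit_code=exit_code)
    else:
        # in-process: assert on the exception directly
        with pytest.raises(exception):
            cmd(archiver, *args)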

+ 19 - 24
src/borg/testsuite/archiver/rdelete_cmd.py

@@ -1,30 +1,25 @@
 import os
-import unittest
-
-from ...constants import *  # NOQA
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
 
 
+import pytest
 
 
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_delete_repo(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-        os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "no"
-        self.cmd(f"--repo={self.repository_location}", "rdelete", exit_code=2)
-        assert os.path.exists(self.repository_path)
-        os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
-        self.cmd(f"--repo={self.repository_location}", "rdelete")
-        # Make sure the repo is gone
-        self.assertFalse(os.path.exists(self.repository_path))
-
+from ...constants import *  # NOQA
+from . import create_regular_file, cmd, RK_ENCRYPTION
 
 
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
 
 
+@pytest.mark.parametrize("archivers", ["archiver", "remote_archiver", "binary_archiver"])
+def test_delete_repo(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
 
 
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+    os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "no"
+    cmd(archiver, f"--repo={repo_location}", "rdelete", exit_code=2)
+    assert os.path.exists(repo_path)
+    os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
+    cmd(archiver, f"--repo={repo_location}", "rdelete")
+    # Make sure the repo is gone
+    assert not os.path.exists(repo_path)

+ 363 - 306
src/borg/testsuite/archiver/recreate_cmd.py

@@ -1,314 +1,371 @@
 import os
 import re
-import unittest
 from datetime import datetime

 import pytest

 from ...constants import *  # NOQA
 from .. import changedir, are_hardlinks_supported
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_recreate_exclude_caches(self):
-        self._create_test_caches()
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test", "--exclude-caches")
-        self._assert_test_caches()
-
-    def test_recreate_exclude_tagged(self):
-        self._create_test_tagged()
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "recreate",
-            "-a",
-            "test",
-            "--exclude-if-present",
-            ".NOBACKUP",
-            "--exclude-if-present",
-            "00-NOBACKUP",
-        )
-        self._assert_test_tagged()
-
-    def test_recreate_exclude_keep_tagged(self):
-        self._create_test_keep_tagged()
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "recreate",
-            "-a",
-            "test",
-            "--exclude-if-present",
-            ".NOBACKUP1",
-            "--exclude-if-present",
-            ".NOBACKUP2",
-            "--exclude-caches",
-            "--keep-exclude-tags",
-        )
-        self._assert_test_keep_tagged()
-
-    @pytest.mark.skipif(not are_hardlinks_supported(), reason="hardlinks not supported")
-    def test_recreate_hardlinked_tags(self):  # test for issue #4911
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.create_regular_file("file1", contents=CACHE_TAG_CONTENTS)  # "wrong" filename, but correct tag contents
-        os.mkdir(os.path.join(self.input_path, "subdir"))  # to make sure the tag is encountered *after* file1
-        os.link(
-            os.path.join(self.input_path, "file1"), os.path.join(self.input_path, "subdir", CACHE_TAG_NAME)
-        )  # correct tag name, hardlink to file1
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        # in the "test" archive, we now have, in this order:
-        # - a regular file item for "file1"
-        # - a hardlink item for "CACHEDIR.TAG" referring back to file1 for its contents
-        self.cmd(f"--repo={self.repository_location}", "recreate", "test", "--exclude-caches", "--keep-exclude-tags")
-        # if issue #4911 is present, the recreate will crash with a KeyError for "input/file1"
-
-    def test_recreate_target_rc(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        output = self.cmd(f"--repo={self.repository_location}", "recreate", "--target=asdf", exit_code=2)
-        assert "Need to specify single archive" in output
-
-    def test_recreate_target(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.check_cache()
-        self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-        self.check_cache()
-        original_archive = self.cmd(f"--repo={self.repository_location}", "rlist")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "recreate",
-            "test0",
-            "input/dir2",
-            "-e",
-            "input/dir2/file3",
-            "--target=new-archive",
-        )
-        self.check_cache()
-        archives = self.cmd(f"--repo={self.repository_location}", "rlist")
-        assert original_archive in archives
-        assert "new-archive" in archives
-
-        listing = self.cmd(f"--repo={self.repository_location}", "list", "new-archive", "--short")
-        assert "file1" not in listing
-        assert "dir2/file2" in listing
-        assert "dir2/file3" not in listing
-
-    def test_recreate_basic(self):
-        self.create_test_files()
-        self.create_regular_file("dir2/file3", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "test0", "input/dir2", "-e", "input/dir2/file3")
-        self.check_cache()
-        listing = self.cmd(f"--repo={self.repository_location}", "list", "test0", "--short")
-        assert "file1" not in listing
-        assert "dir2/file2" in listing
-        assert "dir2/file3" not in listing
-
-    @pytest.mark.skipif(not are_hardlinks_supported(), reason="hardlinks not supported")
-    def test_recreate_subtree_hardlinks(self):
-        # This is essentially the same problem set as in test_extract_hardlinks
-        self._extract_hardlinks_setup()
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test", "input/dir1")
-        self.check_cache()
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test")
-            assert os.stat("input/dir1/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/aaaa").st_nlink == 2
-            assert os.stat("input/dir1/source2").st_nlink == 2
-        with changedir("output"):
-            self.cmd(f"--repo={self.repository_location}", "extract", "test2")
-            assert os.stat("input/dir1/hardlink").st_nlink == 4
-
-    def test_recreate_rechunkify(self):
-        with open(os.path.join(self.input_path, "large_file"), "wb") as fd:
-            fd.write(b"a" * 280)
-            fd.write(b"b" * 280)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input", "--chunker-params", "7,9,8,128")
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input", "--files-cache=disabled")
-        list = self.cmd(
-            f"--repo={self.repository_location}",
-            "list",
-            "test1",
-            "input/large_file",
-            "--format",
-            "{num_chunks} {unique_chunks}",
-        )
-        num_chunks, unique_chunks = map(int, list.split(" "))
-        # test1 and test2 do not deduplicate
-        assert num_chunks == unique_chunks
-        self.cmd(f"--repo={self.repository_location}", "recreate", "--chunker-params", "default")
-        self.check_cache()
-        # test1 and test2 do deduplicate after recreate
-        assert int(
-            self.cmd(f"--repo={self.repository_location}", "list", "test1", "input/large_file", "--format={size}")
-        )
-        assert not int(
-            self.cmd(
-                f"--repo={self.repository_location}", "list", "test1", "input/large_file", "--format", "{unique_chunks}"
-            )
-        )
-
-    def test_recreate_fixed_rechunkify(self):
-        with open(os.path.join(self.input_path, "file"), "wb") as fd:
-            fd.write(b"a" * 8192)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--chunker-params", "7,9,8,128")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/file", "--format", "{num_chunks}"
-        )
-        num_chunks = int(output)
-        assert num_chunks > 2
-        self.cmd(f"--repo={self.repository_location}", "recreate", "--chunker-params", "fixed,4096")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/file", "--format", "{num_chunks}"
-        )
-        num_chunks = int(output)
-        assert num_chunks == 2
-
-    def test_recreate_no_rechunkify(self):
-        with open(os.path.join(self.input_path, "file"), "wb") as fd:
-            fd.write(b"a" * 8192)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        # first create an archive with non-default chunker params:
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "--chunker-params", "7,9,8,128")
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/file", "--format", "{num_chunks}"
-        )
-        num_chunks = int(output)
-        # now recreate the archive and do NOT specify chunker params:
-        output = self.cmd(
-            f"--repo={self.repository_location}",
-            "recreate",
-            "--debug",
-            "--exclude",
-            "filename_never_matches",
-            "-a",
-            "test",
-        )
-        assert "Rechunking" not in output  # we did not give --chunker-params, so it must not rechunk!
-        output = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/file", "--format", "{num_chunks}"
-        )
-        num_chunks_after_recreate = int(output)
-        assert num_chunks == num_chunks_after_recreate
-
-    def test_recreate_recompress(self):
-        self.create_regular_file("compressible", size=10000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input", "-C", "none")
-        file_list = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/compressible", "--format", "{size} {sha256}"
-        )
-        size, sha256_before = file_list.split(" ")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-C", "lz4", "--recompress")
-        self.check_cache()
-        file_list = self.cmd(
-            f"--repo={self.repository_location}", "list", "test", "input/compressible", "--format", "{size} {sha256}"
-        )
-        size, sha256_after = file_list.split(" ")
-        assert sha256_before == sha256_after
-
-    def test_recreate_timestamp(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test0", "input")
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "recreate",
-            "test0",
-            "--timestamp",
-            "1970-01-02T00:00:00",
-            "--comment",
-            "test",
-        )
-        info = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test0").splitlines()
-        dtime = datetime(1970, 1, 2, 0, 0, 0).astimezone()  # local time in local timezone
-        s_time = dtime.strftime("%Y-%m-%d %H:%M:.. %z").replace("+", r"\+")
-        assert any([re.search(r"Time \(start\).+ %s" % s_time, item) for item in info])
-        assert any([re.search(r"Time \(end\).+ %s" % s_time, item) for item in info])
-
-    def test_recreate_dry_run(self):
-        self.create_regular_file("compressible", size=10000)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        archives_before = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-n", "-e", "input/compressible")
-        self.check_cache()
-        archives_after = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        assert archives_after == archives_before
-
-    def test_recreate_skips_nothing_to_do(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_before = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "--chunker-params", "default")
-        self.check_cache()
-        info_after = self.cmd(f"--repo={self.repository_location}", "info", "-a", "test")
-        assert info_before == info_after  # includes archive ID
-
-    def test_recreate_list_output(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=0)
-        self.create_regular_file("file2", size=0)
-        self.create_regular_file("file3", size=0)
-        self.create_regular_file("file4", size=0)
-        self.create_regular_file("file5", size=0)
-
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-        output = self.cmd(
-            f"--repo={self.repository_location}", "recreate", "-a", "test", "--list", "--info", "-e", "input/file2"
-        )
-        self.check_cache()
-        self.assert_in("input/file1", output)
-        self.assert_in("- input/file2", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test", "--list", "-e", "input/file3")
-        self.check_cache()
-        self.assert_in("input/file1", output)
-        self.assert_in("- input/file3", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test", "-e", "input/file4")
-        self.check_cache()
-        self.assert_not_in("input/file1", output)
-        self.assert_not_in("- input/file4", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test", "--info", "-e", "input/file5")
-        self.check_cache()
-        self.assert_not_in("input/file1", output)
-        self.assert_not_in("- input/file5", output)
-
-    def test_comment(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test2", "input", "--comment", "this is the comment")
-        self.cmd(f"--repo={self.repository_location}", "create", "test3", "input", "--comment", '"deleted" comment')
-        self.cmd(f"--repo={self.repository_location}", "create", "test4", "input", "--comment", "preserved comment")
-        assert "Comment: " + os.linesep in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test1")
-        assert "Comment: this is the comment" in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test2")
-
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test1", "--comment", "added comment")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test2", "--comment", "modified comment")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test3", "--comment", "")
-        self.cmd(f"--repo={self.repository_location}", "recreate", "-a", "test4", "12345")
-        assert "Comment: added comment" in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test1")
-        assert "Comment: modified comment" in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test2")
-        assert "Comment: " + os.linesep in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test3")
-        assert "Comment: preserved comment" in self.cmd(f"--repo={self.repository_location}", "info", "-a", "test4")
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import (
+    _create_test_caches,
+    _create_test_tagged,
+    _create_test_keep_tagged,
+    _assert_test_caches,
+    _assert_test_tagged,
+    _assert_test_keep_tagged,
+    _extract_hardlinks_setup,
+    generate_archiver_tests,
+    check_cache,
+    cmd,
+    create_regular_file,
+    create_test_files,
+    RK_ENCRYPTION,
+)
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_recreate_exclude_caches(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_caches(archiver)
+
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "--exclude-caches")
+    _assert_test_caches(archiver)
+
+
+def test_recreate_exclude_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_tagged(archiver)
+
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "recreate",
+        "-a",
+        "test",
+        "--exclude-if-present",
+        ".NOBACKUP",
+        "--exclude-if-present",
+        "00-NOBACKUP",
+    )
+    _assert_test_tagged(archiver)
+
+
+def test_recreate_exclude_keep_tagged(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _create_test_keep_tagged(archiver)
+
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "recreate",
+        "-a",
+        "test",
+        "--exclude-if-present",
+        ".NOBACKUP1",
+        "--exclude-if-present",
+        ".NOBACKUP2",
+        "--exclude-caches",
+        "--keep-exclude-tags",
+    )
+    _assert_test_keep_tagged(archiver)
+
+
+@pytest.mark.skipif(not are_hardlinks_supported(), reason="hardlinks not supported")
+def test_recreate_hardlinked_tags(archivers, request):  # test for issue #4911
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    create_regular_file(input_path, "file1", contents=CACHE_TAG_CONTENTS)  # "wrong" filename, but correct tag contents
+    os.mkdir(os.path.join(input_path, "subdir"))  # to make sure the tag is encountered *after* file1
+    os.link(
+        os.path.join(input_path, "file1"), os.path.join(input_path, "subdir", CACHE_TAG_NAME)
+    )  # correct tag name, hardlink to file1
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    # in the "test" archive, we now have, in this order:
+    # - a regular file item for "file1"
+    # - a hardlink item for "CACHEDIR.TAG" referring back to file1 for its contents
+    cmd(archiver, f"--repo={repo_location}", "recreate", "test", "--exclude-caches", "--keep-exclude-tags")
+    # if issue #4911 is present, the recreate will crash with a KeyError for "input/file1"
+
+
+def test_recreate_target_rc(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    output = cmd(archiver, f"--repo={repo_location}", "recreate", "--target=asdf", exit_code=2)
+    assert "Need to specify single archive" in output
+
+
+def test_recreate_target(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    check_cache(archiver)
+    cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    check_cache(archiver)
+    original_archive = cmd(archiver, f"--repo={repo_location}", "rlist")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "recreate",
+        "test0",
+        "input/dir2",
+        "-e",
+        "input/dir2/file3",
+        "--target=new-archive",
+    )
+    check_cache(archiver)
+    archives = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert original_archive in archives
+    assert "new-archive" in archives
+
+    listing = cmd(archiver, f"--repo={repo_location}", "list", "new-archive", "--short")
+    assert "file1" not in listing
+    assert "dir2/file2" in listing
+    assert "dir2/file3" not in listing
+
+
+def test_recreate_basic(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    create_regular_file(input_path, "dir2/file3", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "test0", "input/dir2", "-e", "input/dir2/file3")
+    check_cache(archiver)
+    listing = cmd(archiver, f"--repo={repo_location}", "list", "test0", "--short")
+    assert "file1" not in listing
+    assert "dir2/file2" in listing
+    assert "dir2/file3" not in listing
+
+
+@pytest.mark.skipif(not are_hardlinks_supported(), reason="hardlinks not supported")
+def test_recreate_subtree_hardlinks(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    # This is essentially the same problem set as in test_extract_hardlinks
+    _extract_hardlinks_setup(archiver)
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "input/dir1")
+    check_cache(archiver)
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test")
+        assert os.stat("input/dir1/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/aaaa").st_nlink == 2
+        assert os.stat("input/dir1/source2").st_nlink == 2
+    with changedir("output"):
+        cmd(archiver, f"--repo={repo_location}", "extract", "test2")
+        assert os.stat("input/dir1/hardlink").st_nlink == 4
+
+
+def test_recreate_rechunkify(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    with open(os.path.join(input_path, "large_file"), "wb") as fd:
+        fd.write(b"a" * 280)
+        fd.write(b"b" * 280)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", "input", "--chunker-params", "7,9,8,128")
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--files-cache=disabled")
+    chunks_list = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "list",
+        "test1",
+        "input/large_file",
+        "--format",
+        "{num_chunks} {unique_chunks}",
+    )
+    num_chunks, unique_chunks = map(int, chunks_list.split(" "))
+    # test1 and test2 do not deduplicate
+    assert num_chunks == unique_chunks
+    cmd(archiver, f"--repo={repo_location}", "recreate", "--chunker-params", "default")
+    check_cache(archiver)
+    # test1 and test2 do deduplicate after recreate
+    assert int(cmd(archiver, f"--repo={repo_location}", "list", "test1", "input/large_file", "--format={size}"))
+    assert not int(
+        cmd(archiver, f"--repo={repo_location}", "list", "test1", "input/large_file", "--format", "{unique_chunks}")
+    )
+
+
+def test_recreate_fixed_rechunkify(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    with open(os.path.join(input_path, "file"), "wb") as fd:
+        fd.write(b"a" * 8192)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--chunker-params", "7,9,8,128")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "input/file", "--format", "{num_chunks}")
+    num_chunks = int(output)
+    assert num_chunks > 2
+    cmd(archiver, f"--repo={repo_location}", "recreate", "--chunker-params", "fixed,4096")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "input/file", "--format", "{num_chunks}")
+    num_chunks = int(output)
+    assert num_chunks == 2
+
+
+def test_recreate_no_rechunkify(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    with open(os.path.join(input_path, "file"), "wb") as fd:
+        fd.write(b"a" * 8192)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    # first create an archive with non-default chunker params:
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "--chunker-params", "7,9,8,128")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "input/file", "--format", "{num_chunks}")
+    num_chunks = int(output)
+    # now recreate the archive and do NOT specify chunker params:
+    output = cmd(
+        archiver, f"--repo={repo_location}", "recreate", "--debug", "--exclude", "filename_never_matches", "-a", "test"
+    )
+    assert "Rechunking" not in output  # we did not give --chunker-params, so it must not rechunk!
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test", "input/file", "--format", "{num_chunks}")
+    num_chunks_after_recreate = int(output)
+    assert num_chunks == num_chunks_after_recreate
+
+
+def test_recreate_recompress(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "compressible", size=10000)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input", "-C", "none")
+    file_list = cmd(
+        archiver, f"--repo={repo_location}", "list", "test", "input/compressible", "--format", "{size} {sha256}"
+    )
+    size, sha256_before = file_list.split(" ")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-C", "lz4", "--recompress")
+    check_cache(archiver)
+    file_list = cmd(
+        archiver, f"--repo={repo_location}", "list", "test", "input/compressible", "--format", "{size} {sha256}"
+    )
+    size, sha256_after = file_list.split(" ")
+    assert sha256_before == sha256_after
+
+
+def test_recreate_timestamp(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    create_test_files(input_path)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test0", "input")
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "recreate",
+        "test0",
+        "--timestamp",
+        "1970-01-02T00:00:00",
+        "--comment",
+        "test",
+    )
+    info = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test0").splitlines()
+    dtime = datetime(1970, 1, 2, 0, 0, 0).astimezone()  # local time in local timezone
+    s_time = dtime.strftime("%Y-%m-%d %H:%M:.. %z").replace("+", r"\+")
+    assert any([re.search(r"Time \(start\).+ %s" % s_time, item) for item in info])
+    assert any([re.search(r"Time \(end\).+ %s" % s_time, item) for item in info])
+
+
+def test_recreate_dry_run(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "compressible", size=10000)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    archives_before = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-n", "-e", "input/compressible")
+    check_cache(archiver)
+    archives_after = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    assert archives_after == archives_before
+
+
+def test_recreate_skips_nothing_to_do(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_before = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "--chunker-params", "default")
+    check_cache(archiver)
+    info_after = cmd(archiver, f"--repo={repo_location}", "info", "-a", "test")
+    assert info_before == info_after  # includes archive ID
+
+
+def test_recreate_list_output(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=0)
+    create_regular_file(input_path, "file2", size=0)
+    create_regular_file(input_path, "file3", size=0)
+    create_regular_file(input_path, "file4", size=0)
+    create_regular_file(input_path, "file5", size=0)
+
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+
+    output = cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "--list", "--info", "-e", "input/file2")
+    check_cache(archiver)
+    assert "input/file1" in output
+    assert "- input/file2" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "--list", "-e", "input/file3")
+    check_cache(archiver)
+    assert "input/file1" in output
+    assert "- input/file3" in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "-e", "input/file4")
+    check_cache(archiver)
+    assert "input/file1" not in output
+    assert "- input/file4" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test", "--info", "-e", "input/file5")
+    check_cache(archiver)
+    assert "input/file1" not in output
+    assert "- input/file5" not in output
+
+
+def test_comment(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test2", "input", "--comment", "this is the comment")
+    cmd(archiver, f"--repo={repo_location}", "create", "test3", "input", "--comment", '"deleted" comment')
+    cmd(archiver, f"--repo={repo_location}", "create", "test4", "input", "--comment", "preserved comment")
+    assert "Comment: " + os.linesep in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test1")
+    assert "Comment: this is the comment" in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test2")
+
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test1", "--comment", "added comment")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test2", "--comment", "modified comment")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test3", "--comment", "")
+    cmd(archiver, f"--repo={repo_location}", "recreate", "-a", "test4", "12345")
+    assert "Comment: added comment" in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test1")
+    assert "Comment: modified comment" in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test2")
+    assert "Comment: " + os.linesep in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test3")
+    assert "Comment: preserved comment" in cmd(archiver, f"--repo={repo_location}", "info", "-a", "test4")

+ 24 - 31
src/borg/testsuite/archiver/rename_cmd.py

@@ -1,37 +1,30 @@
-import unittest
+import pytest

 from ...constants import *  # NOQA
 from ...manifest import Manifest
 from ...repository import Repository
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
+from . import cmd, create_regular_file, RK_ENCRYPTION


-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_rename(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.create_regular_file("dir2/file2", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(f"--repo={self.repository_location}", "create", "test.2", "input")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.2", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "rename", "test", "test.3")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.2", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "rename", "test.2", "test.4")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.3", "--dry-run")
-        self.cmd(f"--repo={self.repository_location}", "extract", "test.4", "--dry-run")
-        # Make sure both archives have been renamed
-        with Repository(self.repository_path) as repository:
-            manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
-        self.assert_equal(len(manifest.archives), 2)
-        self.assert_in("test.3", manifest.archives)
-        self.assert_in("test.4", manifest.archives)
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+@pytest.mark.parametrize("archivers", ["archiver", "remote_archiver", "binary_archiver"])
+def test_rename(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, repo_path, input_path = archiver.repository_location, archiver.repository_path, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+    create_regular_file(input_path, "dir2/file2", size=1024 * 80)
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "create", "test.2", "input")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.2", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "rename", "test", "test.3")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.2", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "rename", "test.2", "test.4")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.3", "--dry-run")
+    cmd(archiver, f"--repo={repo_location}", "extract", "test.4", "--dry-run")
+    # Make sure both archives have been renamed
+    with Repository(repo_path) as repository:
+        manifest = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+    assert len(manifest.archives) == 2
+    assert "test.3" in manifest.archives
+    assert "test.4" in manifest.archives

+ 7 - 7
src/borg/testsuite/archiver/return_codes.py

@@ -1,20 +1,20 @@
 from ...constants import *  # NOQA
-from . import cmd, changedir
+from . import cmd_fixture, changedir


-def test_return_codes(cmd, tmpdir):
+def test_return_codes(cmd_fixture, tmpdir):
     repo = tmpdir.mkdir("repo")
     input = tmpdir.mkdir("input")
     output = tmpdir.mkdir("output")
     input.join("test_file").write("content")
-    rc, out = cmd("--repo=%s" % str(repo), "rcreate", "--encryption=none")
+    rc, out = cmd_fixture("--repo=%s" % str(repo), "rcreate", "--encryption=none")
     assert rc == EXIT_SUCCESS
-    rc, out = cmd("--repo=%s" % repo, "create", "archive", str(input))
+    rc, out = cmd_fixture("--repo=%s" % repo, "create", "archive", str(input))
     assert rc == EXIT_SUCCESS
     with changedir(str(output)):
-        rc, out = cmd("--repo=%s" % repo, "extract", "archive")
+        rc, out = cmd_fixture("--repo=%s" % repo, "extract", "archive")
         assert rc == EXIT_SUCCESS
-    rc, out = cmd("--repo=%s" % repo, "extract", "archive", "does/not/match")
+    rc, out = cmd_fixture("--repo=%s" % repo, "extract", "archive", "does/not/match")
     assert rc == EXIT_WARNING  # pattern did not match
-    rc, out = cmd("--repo=%s" % repo, "create", "archive", str(input))
+    rc, out = cmd_fixture("--repo=%s" % repo, "create", "archive", str(input))
     assert rc == EXIT_ERROR  # duplicate archive name

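Note: the expected constants come from `borg.constants`; success, warning and error map to the conventional process return codes 0, 1 and 2:

```python
from borg.constants import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR

# the return-code contract this test exercises
assert (EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR) == (0, 1, 2)
```
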
+ 58 - 57
src/borg/testsuite/archiver/rinfo_cmd.py

@@ -1,61 +1,62 @@
 import json
 from random import randbytes
-import unittest

 from ...constants import *  # NOQA
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RK_ENCRYPTION,
-    BORG_EXES,
-    checkts,
-)
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_info(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_repo = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "Original size:" in info_repo
-
-    def test_info_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_repo = json.loads(self.cmd(f"--repo={self.repository_location}", "rinfo", "--json"))
-        repository = info_repo["repository"]
-        assert len(repository["id"]) == 64
-        assert "last_modified" in repository
-        checkts(repository["last_modified"])
-        assert info_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
-        assert "keyfile" not in info_repo["encryption"]
-        cache = info_repo["cache"]
-        stats = cache["stats"]
-        assert all(isinstance(o, int) for o in stats.values())
-        assert all(key in stats for key in ("total_chunks", "total_size", "total_unique_chunks", "unique_size"))
-
-    def test_info_on_repository_with_storage_quota(self):
-        self.create_regular_file("file1", contents=randbytes(1000 * 1000))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION, "--storage-quota=1G")
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_repo = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "Storage quota: 1.00 MB used out of 1.00 GB" in info_repo
-
-    def test_info_on_repository_without_storage_quota(self):
-        self.create_regular_file("file1", contents=randbytes(1000 * 1000))
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        info_repo = self.cmd(f"--repo={self.repository_location}", "rinfo")
-        assert "Storage quota: 1.00 MB used" in info_repo
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import checkts, cmd, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_info(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_repo = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "Original size:" in info_repo
+
+
+def test_info_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_repo = json.loads(cmd(archiver, f"--repo={repo_location}", "rinfo", "--json"))
+    repository = info_repo["repository"]
+    assert len(repository["id"]) == 64
+    assert "last_modified" in repository
+
+    checkts(repository["last_modified"])
+    assert info_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
+    assert "keyfile" not in info_repo["encryption"]
+
+    cache = info_repo["cache"]
+    stats = cache["stats"]
+    assert all(isinstance(o, int) for o in stats.values())
+    assert all(key in stats for key in ("total_chunks", "total_size", "total_unique_chunks", "unique_size"))
+
+
+def test_info_on_repository_with_storage_quota(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", contents=randbytes(1000 * 1000))
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION, "--storage-quota=1G")
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_repo = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "Storage quota: 1.00 MB used out of 1.00 GB" in info_repo
+
+
+def test_info_on_repository_without_storage_quota(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", contents=randbytes(1000 * 1000))
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    info_repo = cmd(archiver, f"--repo={repo_location}", "rinfo")
+    assert "Storage quota: 1.00 MB used" in info_repo

+ 123 - 114
src/borg/testsuite/archiver/rlist_cmd.py

@@ -1,118 +1,127 @@
 import json
 import os
-import unittest

 from ...constants import *  # NOQA
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    src_dir,
-    RK_ENCRYPTION,
-    checkts,
-    BORG_EXES,
-)
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_rlist_glob(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test-1", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "something-else-than-test-1", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "test-2", src_dir)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--match-archives=sh:test-*")
-        self.assert_in("test-1", output)
-        self.assert_in("test-2", output)
-        self.assert_not_in("something-else", output)
-
-    def test_archives_format(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "--comment", "comment 1", "test-1", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "--comment", "comment 2", "test-2", src_dir)
-        output_1 = self.cmd(f"--repo={self.repository_location}", "rlist")
-        output_2 = self.cmd(
-            f"--repo={self.repository_location}", "rlist", "--format", "{archive:<36} {time} [{id}]{NL}"
-        )
-        self.assertEqual(output_1, output_2)
-        output_1 = self.cmd(f"--repo={self.repository_location}", "rlist", "--short")
-        self.assertEqual(output_1, "test-1" + os.linesep + "test-2" + os.linesep)
-        output_3 = self.cmd(f"--repo={self.repository_location}", "rlist", "--format", "{name} {comment}{NL}")
-        self.assert_in("test-1 comment 1" + os.linesep, output_3)
-        self.assert_in("test-2 comment 2" + os.linesep, output_3)
-
-    def test_size_nfiles(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.create_regular_file("file1", size=123000)
-        self.create_regular_file("file2", size=456)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input/file1", "input/file2")
-        output = self.cmd(f"--repo={self.repository_location}", "list", "test")
-        print(output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--format", "{name} {nfiles} {size}")
-        o_t = output.split()
-        assert o_t[0] == "test"
-        assert int(o_t[1]) == 2
-        assert 123456 <= int(o_t[2]) < 123999  # there is some metadata overhead
-
-    def test_date_matching(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        earliest_ts = "2022-11-20T23:59:59"
-        ts_in_between = "2022-12-18T23:59:59"
-        self.create_src_archive("archive1", ts=earliest_ts)
-        self.create_src_archive("archive2", ts=ts_in_between)
-        self.create_src_archive("archive3")
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "-v", "--oldest=23e", exit_code=2)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "-v", "--oldest=1m", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
-
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "-v", "--newest=1m", exit_code=0)
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive2", output)
-        self.assert_not_in("archive1", output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "-v", "--newer=1d", exit_code=0)
-        self.assert_in("archive3", output)
-        self.assert_not_in("archive1", output)
-        self.assert_not_in("archive2", output)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "-v", "--older=1d", exit_code=0)
-        self.assert_in("archive1", output)
-        self.assert_in("archive2", output)
-        self.assert_not_in("archive3", output)
-
-    def test_rlist_consider_checkpoints(self):
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test1", src_dir)
-        # these are not really a checkpoints, but they look like some:
-        self.cmd(f"--repo={self.repository_location}", "create", "test2.checkpoint", src_dir)
-        self.cmd(f"--repo={self.repository_location}", "create", "test3.checkpoint.1", src_dir)
-        output = self.cmd(f"--repo={self.repository_location}", "rlist")
-        assert "test1" in output
-        assert "test2.checkpoint" not in output
-        assert "test3.checkpoint.1" not in output
-        output = self.cmd(f"--repo={self.repository_location}", "rlist", "--consider-checkpoints")
-        assert "test1" in output
-        assert "test2.checkpoint" in output
-        assert "test3.checkpoint.1" in output
-
-    def test_rlist_json(self):
-        self.create_regular_file("file1", size=1024 * 80)
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-
-        list_repo = json.loads(self.cmd(f"--repo={self.repository_location}", "rlist", "--json"))
-        repository = list_repo["repository"]
-        assert len(repository["id"]) == 64
-        checkts(repository["last_modified"])
-        assert list_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
-        assert "keyfile" not in list_repo["encryption"]
-        archive0 = list_repo["archives"][0]
-        checkts(archive0["time"])
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
+from . import cmd, checkts, create_src_archive, create_regular_file, src_dir, generate_archiver_tests, RK_ENCRYPTION
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_rlist_glob(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test-1", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "something-else-than-test-1", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "test-2", src_dir)
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--match-archives=sh:test-*")
+    assert "test-1" in output
+    assert "test-2" in output
+    assert "something-else" not in output
+
+
+def test_archives_format(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "--comment", "comment 1", "test-1", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "--comment", "comment 2", "test-2", src_dir)
+    output_1 = cmd(archiver, f"--repo={repo_location}", "rlist")
+    output_2 = cmd(archiver, f"--repo={repo_location}", "rlist", "--format", "{archive:<36} {time} [{id}]{NL}")
+    assert output_1 == output_2
+    output_1 = cmd(archiver, f"--repo={repo_location}", "rlist", "--short")
+    assert output_1 == "test-1" + os.linesep + "test-2" + os.linesep
+    output_3 = cmd(archiver, f"--repo={repo_location}", "rlist", "--format", "{name} {comment}{NL}")
+    assert "test-1 comment 1" + os.linesep in output_3
+    assert "test-2 comment 2" + os.linesep in output_3
+
+
+def test_size_nfiles(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    create_regular_file(input_path, "file1", size=123000)
+    create_regular_file(input_path, "file2", size=456)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input/file1", "input/file2")
+    output = cmd(archiver, f"--repo={repo_location}", "list", "test")
+    print(output)
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--format", "{name} {nfiles} {size}")
+    o_t = output.split()
+    assert o_t[0] == "test"
+    assert int(o_t[1]) == 2
+    assert 123456 <= int(o_t[2]) < 123999  # there is some metadata overhead
+
+
+def test_date_matching(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    earliest_ts = "2022-11-20T23:59:59"
+    ts_in_between = "2022-12-18T23:59:59"
+    create_src_archive(archiver, "archive1", ts=earliest_ts)
+    create_src_archive(archiver, "archive2", ts=ts_in_between)
+    create_src_archive(archiver, "archive3")
+    cmd(archiver, f"--repo={repo_location}", "rlist", "-v", "--oldest=23e", exit_code=2)
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "-v", "--oldest=1m", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "-v", "--newest=1m", exit_code=0)
+    assert "archive3" in output
+    assert "archive2" not in output
+    assert "archive1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "-v", "--newer=1d", exit_code=0)
+    assert "archive3" in output
+    assert "archive1" not in output
+    assert "archive2" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "-v", "--older=1d", exit_code=0)
+    assert "archive1" in output
+    assert "archive2" in output
+    assert "archive3" not in output
+
+
+def test_rlist_consider_checkpoints(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test1", src_dir)
+    # these are not really checkpoints, but they look like some:
+    cmd(archiver, f"--repo={repo_location}", "create", "test2.checkpoint", src_dir)
+    cmd(archiver, f"--repo={repo_location}", "create", "test3.checkpoint.1", src_dir)
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist")
+    assert "test1" in output
+    assert "test2.checkpoint" not in output
+    assert "test3.checkpoint.1" not in output
+
+    output = cmd(archiver, f"--repo={repo_location}", "rlist", "--consider-checkpoints")
+    assert "test1" in output
+    assert "test2.checkpoint" in output
+    assert "test3.checkpoint.1" in output
+
+
+def test_rlist_json(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_regular_file(input_path, "file1", size=1024 * 80)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    list_repo = json.loads(cmd(archiver, f"--repo={repo_location}", "rlist", "--json"))
+    repository = list_repo["repository"]
+    assert len(repository["id"]) == 64
+    checkts(repository["last_modified"])
+    assert list_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
+    assert "keyfile" not in list_repo["encryption"]
+    archive0 = list_repo["archives"][0]
+    checkts(archive0["time"])
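
Note: `test_archives_format` asserts that the default `rlist` output equals the explicit `{archive:<36} {time} [{id}]{NL}` format; `:<36` is plain Python format-spec left-alignment:

```python
row = "{archive:<36} {time} [{id}]".format(
    archive="test-1", time="Sun, 2023-06-11 12:00:00", id="0" * 64  # invented values
)
assert row.startswith("test-1" + " " * 30)  # name left-aligned, padded to 36 columns
```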

+ 3 - 0
src/borg/testsuite/archiver/serve_cmd.py

@@ -34,11 +34,14 @@ def serve_socket(monkeypatch):
 def test_with_socket(serve_socket, tmpdir, monkeypatch):
     have_a_short_runtime_dir(monkeypatch)
     repo_path = str(tmpdir.join("repo"))
+
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rcreate", "--encryption=none")
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rcreate", "--encryption=none")
     assert ret == 0
     assert ret == 0
+
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rinfo")
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rinfo")
     assert ret == 0
     assert ret == 0
     assert "Repository ID: " in output
     assert "Repository ID: " in output
+
     monkeypatch.setenv("BORG_DELETE_I_KNOW_WHAT_I_AM_DOING", "YES")
     monkeypatch.setenv("BORG_DELETE_I_KNOW_WHAT_I_AM_DOING", "YES")
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rdelete")
     ret, output = exec_cmd(f"--socket={serve_socket}", f"--repo=socket://{repo_path}", "rdelete")
     assert ret == 0
     assert ret == 0

+ 253 - 224
src/borg/testsuite/archiver/tar_cmds.py

@@ -1,20 +1,15 @@
 import os
 import shutil
 import subprocess
-import unittest

 import pytest

 from ...constants import *  # NOQA
 from .. import changedir
-from . import (
-    ArchiverTestCaseBase,
-    RemoteArchiverTestCaseBase,
-    ArchiverTestCaseBinaryBase,
-    RK_ENCRYPTION,
-    requires_hardlinks,
-    BORG_EXES,
-)
+from . import assert_dirs_equal, _extract_hardlinks_setup, cmd, create_test_files, requires_hardlinks, RK_ENCRYPTION
+from . import generate_archiver_tests
+
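+# generate_archiver_tests() parametrizes each test below with an "archivers" fixture name
+# for every requested kind (local, remote, binary).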
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA


 def have_gnutar():
@@ -25,218 +20,252 @@ def have_gnutar():
     return b"GNU tar" in stdout


-class ArchiverTestCase(ArchiverTestCaseBase):
-    requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason="GNU tar must be installed for this test.")
-    requires_gzip = pytest.mark.skipif(not shutil.which("gzip"), reason="gzip must be installed for this test.")
-
-    @requires_gnutar
-    def test_export_tar(self):
-        self.create_test_files()
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        self.cmd(
-            f"--repo={self.repository_location}", "export-tar", "test", "simple.tar", "--progress", "--tar-format=GNU"
-        )
-        with changedir("output"):
-            # This probably assumes GNU tar. Note -p switch to extract permissions regardless of umask.
-            subprocess.check_call(["tar", "xpf", "../simple.tar", "--warning=no-timestamp"])
-        self.assert_dirs_equal("input", "output/input", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
-
-    @requires_gnutar
-    @requires_gzip
-    def test_export_tar_gz(self):
-        if not shutil.which("gzip"):
-            pytest.skip("gzip is not installed")
-        self.create_test_files()
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        list = self.cmd(
-            f"--repo={self.repository_location}", "export-tar", "test", "simple.tar.gz", "--list", "--tar-format=GNU"
-        )
-        assert "input/file1\n" in list
-        assert "input/dir2\n" in list
-        with changedir("output"):
-            subprocess.check_call(["tar", "xpf", "../simple.tar.gz", "--warning=no-timestamp"])
-        self.assert_dirs_equal("input", "output/input", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
-
-    @requires_gnutar
-    def test_export_tar_strip_components(self):
-        if not shutil.which("gzip"):
-            pytest.skip("gzip is not installed")
-        self.create_test_files()
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
-        self.cmd(f"--repo={self.repository_location}", "create", "test", "input")
-        list = self.cmd(
-            f"--repo={self.repository_location}",
-            "export-tar",
-            "test",
-            "simple.tar",
-            "--strip-components=1",
-            "--list",
-            "--tar-format=GNU",
-        )
-        # --list's path are those before processing with --strip-components
-        assert "input/file1\n" in list
-        assert "input/dir2\n" in list
-        with changedir("output"):
-            subprocess.check_call(["tar", "xpf", "../simple.tar", "--warning=no-timestamp"])
-        self.assert_dirs_equal("input", "output/", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
-
-    @requires_hardlinks
-    @requires_gnutar
-    def test_export_tar_strip_components_links(self):
-        self._extract_hardlinks_setup()
-        self.cmd(
-            f"--repo={self.repository_location}",
-            "export-tar",
-            "test",
-            "output.tar",
-            "--strip-components=2",
-            "--tar-format=GNU",
-        )
-        with changedir("output"):
-            subprocess.check_call(["tar", "xpf", "../output.tar", "--warning=no-timestamp"])
-            assert os.stat("hardlink").st_nlink == 2
-            assert os.stat("subdir/hardlink").st_nlink == 2
-            assert os.stat("aaaa").st_nlink == 2
-            assert os.stat("source2").st_nlink == 2
-
-    @requires_hardlinks
-    @requires_gnutar
-    def test_extract_hardlinks_tar(self):
-        self._extract_hardlinks_setup()
-        self.cmd(
-            f"--repo={self.repository_location}", "export-tar", "test", "output.tar", "input/dir1", "--tar-format=GNU"
-        )
-        with changedir("output"):
-            subprocess.check_call(["tar", "xpf", "../output.tar", "--warning=no-timestamp"])
-            assert os.stat("input/dir1/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
-            assert os.stat("input/dir1/aaaa").st_nlink == 2
-            assert os.stat("input/dir1/source2").st_nlink == 2
-
-    def test_import_tar(self, tar_format="PAX"):
-        self.create_test_files(create_hardlinks=False)  # hardlinks become separate files
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "src", "input")
-        self.cmd(f"--repo={self.repository_location}", "export-tar", "src", "simple.tar", f"--tar-format={tar_format}")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", "simple.tar")
-        with changedir(self.output_path):
-            self.cmd(f"--repo={self.repository_location}", "extract", "dst")
-        self.assert_dirs_equal("input", "output/input", ignore_ns=True, ignore_xattrs=True)
-
-    def test_import_unusual_tar(self):
-        # Contains these, unusual entries:
-        # /foobar
-        # ./bar
-        # ./foo2/
-        # ./foo//bar
-        # ./
-        tar_archive = os.path.join(os.path.dirname(__file__), "unusual_paths.tar")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", tar_archive)
-        files = self.cmd(f"--repo={self.repository_location}", "list", "dst", "--format", "{path}{NL}").splitlines()
-        self.assert_equal(set(files), {"foobar", "bar", "foo2", "foo/bar", "."})
-
-    def test_import_tar_with_dotdot(self):
-        # Contains this file:
-        # ../../../../etc/shadow
-        tar_archive = os.path.join(os.path.dirname(__file__), "dotdot_path.tar")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        with pytest.raises(ValueError, match="unexpected '..' element in path '../../../../etc/shadow'"):
-            self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", tar_archive, exit_code=2)
-
-    @requires_gzip
-    def test_import_tar_gz(self, tar_format="GNU"):
-        if not shutil.which("gzip"):
-            pytest.skip("gzip is not installed")
-        self.create_test_files(create_hardlinks=False)  # hardlinks become separate files
-        os.unlink("input/flagfile")
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "src", "input")
-        self.cmd(f"--repo={self.repository_location}", "export-tar", "src", "simple.tgz", f"--tar-format={tar_format}")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", "simple.tgz")
-        with changedir(self.output_path):
-            self.cmd(f"--repo={self.repository_location}", "extract", "dst")
-        self.assert_dirs_equal("input", "output/input", ignore_ns=True, ignore_xattrs=True)
-
-    @requires_gnutar
-    def test_import_concatenated_tar_with_ignore_zeros(self):
-        self.create_test_files(create_hardlinks=False)  # hardlinks become separate files
-        os.unlink("input/flagfile")
-        with changedir("input"):
-            subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
-            subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
-            with open("concatenated.tar", "wb") as concatenated:
-                with open("file1.tar", "rb") as file1:
-                    concatenated.write(file1.read())
-                # Clean up for assert_dirs_equal.
-                os.unlink("file1.tar")
-
-                with open("the_rest.tar", "rb") as the_rest:
-                    concatenated.write(the_rest.read())
-                # Clean up for assert_dirs_equal.
-                os.unlink("the_rest.tar")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "--ignore-zeros", "dst", "input/concatenated.tar")
-        # Clean up for assert_dirs_equal.
-        os.unlink("input/concatenated.tar")
-
-        with changedir(self.output_path):
-            self.cmd(f"--repo={self.repository_location}", "extract", "dst")
-        self.assert_dirs_equal("input", "output", ignore_ns=True, ignore_xattrs=True)
-
-    @requires_gnutar
-    def test_import_concatenated_tar_without_ignore_zeros(self):
-        self.create_test_files(create_hardlinks=False)  # hardlinks become separate files
-        os.unlink("input/flagfile")
-        with changedir("input"):
-            subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
-            subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
-            with open("concatenated.tar", "wb") as concatenated:
-                with open("file1.tar", "rb") as file1:
-                    concatenated.write(file1.read())
-                with open("the_rest.tar", "rb") as the_rest:
-                    concatenated.write(the_rest.read())
-                os.unlink("the_rest.tar")
-
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", "input/concatenated.tar")
-
-        with changedir(self.output_path):
-            self.cmd(f"--repo={self.repository_location}", "extract", "dst")
-
-        # Negative test -- assert that only file1 has been extracted, and the_rest has been ignored
-        # due to zero-filled block marker.
-        self.assert_equal(os.listdir("output"), ["file1"])
-
-    def test_roundtrip_pax_borg(self):
-        self.create_test_files()
-        self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
-        self.cmd(f"--repo={self.repository_location}", "create", "src", "input")
-        self.cmd(f"--repo={self.repository_location}", "export-tar", "src", "simple.tar", "--tar-format=BORG")
-        self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", "simple.tar")
-        with changedir(self.output_path):
-            self.cmd(f"--repo={self.repository_location}", "extract", "dst")
-        self.assert_dirs_equal("input", "output/input")
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
-
-    @unittest.skip("does not work with binaries")
-    def test_import_tar_with_dotdot(self):
-        # the test checks for a raised exception. that can't work if the code runs in a separate process.
-        pass
+requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason="GNU tar must be installed for this test.")
+requires_gzip = pytest.mark.skipif(not shutil.which("gzip"), reason="gzip must be installed for this test.")
+
+
+@requires_gnutar
+def test_export_tar(archivers, request):
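+    # "archivers" holds the fixture name injected by pytest_generate_tests above;
+    # request.getfixturevalue() resolves it to the concrete archiver fixture.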
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    os.unlink("input/flagfile")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "simple.tar", "--progress", "--tar-format=GNU")
+    with changedir("output"):
+        # This probably assumes GNU tar. Note -p switch to extract permissions regardless of umask.
+        subprocess.check_call(["tar", "xpf", "../simple.tar", "--warning=no-timestamp"])
+    assert_dirs_equal("input", "output/input", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
+
+
+@requires_gnutar
+@requires_gzip
+def test_export_tar_gz(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    os.unlink("input/flagfile")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    test_list = cmd(
+        archiver, f"--repo={repo_location}", "export-tar", "test", "simple.tar.gz", "--list", "--tar-format=GNU"
+    )
+    assert "input/file1\n" in test_list
+    assert "input/dir2\n" in test_list
+
+    with changedir("output"):
+        subprocess.check_call(["tar", "xpf", "../simple.tar.gz", "--warning=no-timestamp"])
+    assert_dirs_equal("input", "output/input", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
+
+
+@requires_gnutar
+@requires_gzip
+def test_export_tar_strip_components(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+    create_test_files(input_path)
+    os.unlink("input/flagfile")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
+    cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
+    test_list = cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "export-tar",
+        "test",
+        "simple.tar",
+        "--strip-components=1",
+        "--list",
+        "--tar-format=GNU",
+    )
+    # --list's paths are those before processing with --strip-components
+    assert "input/file1\n" in test_list
+    assert "input/dir2\n" in test_list
+
+    with changedir("output"):
+        subprocess.check_call(["tar", "xpf", "../simple.tar", "--warning=no-timestamp"])
+    assert_dirs_equal("input", "output/", ignore_flags=True, ignore_xattrs=True, ignore_ns=True)
+
+
+@requires_hardlinks
+@requires_gnutar
+def test_export_tar_strip_components_links(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _extract_hardlinks_setup(archiver)
+
+    cmd(
+        archiver,
+        f"--repo={repo_location}",
+        "export-tar",
+        "test",
+        "output.tar",
+        "--strip-components=2",
+        "--tar-format=GNU",
+    )
+
+    with changedir("output"):
+        subprocess.check_call(["tar", "xpf", "../output.tar", "--warning=no-timestamp"])
+        assert os.stat("hardlink").st_nlink == 2
+        assert os.stat("subdir/hardlink").st_nlink == 2
+        assert os.stat("aaaa").st_nlink == 2
+        assert os.stat("source2").st_nlink == 2
+
+
+@requires_hardlinks
+@requires_gnutar
+def test_extract_hardlinks_tar(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    _extract_hardlinks_setup(archiver)
+
+    cmd(archiver, f"--repo={repo_location}", "export-tar", "test", "output.tar", "input/dir1", "--tar-format=GNU")
+
+    with changedir("output"):
+        subprocess.check_call(["tar", "xpf", "../output.tar", "--warning=no-timestamp"])
+        assert os.stat("input/dir1/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/subdir/hardlink").st_nlink == 2
+        assert os.stat("input/dir1/aaaa").st_nlink == 2
+        assert os.stat("input/dir1/source2").st_nlink == 2
+
+
+def test_import_tar(archivers, request, tar_format="PAX"):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path, output_path = archiver.repository_location, archiver.input_path, archiver.output_path
+    create_test_files(input_path, create_hardlinks=False)  # hardlinks become separate files
+    os.unlink("input/flagfile")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "src", "input")
+    cmd(archiver, f"--repo={repo_location}", "export-tar", "src", "simple.tar", f"--tar-format={tar_format}")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", "simple.tar")
+
+    with changedir(output_path):
+        cmd(archiver, f"--repo={repo_location}", "extract", "dst")
+    assert_dirs_equal("input", "output/input", ignore_ns=True, ignore_xattrs=True)
+
+
+def test_import_unusual_tar(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+
+    # Contains these unusual entries:
+    # /foobar
+    # ./bar
+    # ./foo2/
+    # ./foo//bar
+    # ./
+    tar_archive = os.path.join(os.path.dirname(__file__), "unusual_paths.tar")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", tar_archive)
+    files = cmd(archiver, f"--repo={repo_location}", "list", "dst", "--format", "{path}{NL}").splitlines()
+    assert set(files) == {"foobar", "bar", "foo2", "foo/bar", "."}
+
+
+def test_import_tar_with_dotdot(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location = archiver.repository_location
+    if archiver.EXE:  # the test checks for a raised exception. that can't work if the code runs in a separate process.
+        pytest.skip("does not work with binaries")
+
+    # Contains this file:
+    # ../../../../etc/shadow
+    tar_archive = os.path.join(os.path.dirname(__file__), "dotdot_path.tar")
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    with pytest.raises(ValueError, match="unexpected '..' element in path '../../../../etc/shadow'"):
+        cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", tar_archive, exit_code=2)
+
+
+@requires_gzip
+def test_import_tar_gz(archivers, request, tar_format="GNU"):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path, output_path = archiver.repository_location, archiver.input_path, archiver.output_path
+    create_test_files(input_path, create_hardlinks=False)  # hardlinks become separate files
+    os.unlink("input/flagfile")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "src", "input")
+    cmd(archiver, f"--repo={repo_location}", "export-tar", "src", "simple.tgz", f"--tar-format={tar_format}")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", "simple.tgz")
+
+    with changedir(output_path):
+        cmd(archiver, f"--repo={repo_location}", "extract", "dst")
+    assert_dirs_equal("input", "output/input", ignore_ns=True, ignore_xattrs=True)
+
+
+@requires_gnutar
+def test_import_concatenated_tar_with_ignore_zeros(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path, output_path = archiver.repository_location, archiver.input_path, archiver.output_path
+    create_test_files(input_path, create_hardlinks=False)  # hardlinks become separate files
+    os.unlink("input/flagfile")
+
+    with changedir("input"):
+        subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
+        subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
+        with open("concatenated.tar", "wb") as concatenated:
+            with open("file1.tar", "rb") as file1:
+                concatenated.write(file1.read())
+            # Clean up for assert_dirs_equal.
+            os.unlink("file1.tar")
+
+            with open("the_rest.tar", "rb") as the_rest:
+                concatenated.write(the_rest.read())
+            # Clean up for assert_dirs_equal.
+            os.unlink("the_rest.tar")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "--ignore-zeros", "dst", "input/concatenated.tar")
+    # Clean up for assert_dirs_equal.
+    os.unlink("input/concatenated.tar")
+
+    with changedir(output_path):
+        cmd(archiver, f"--repo={repo_location}", "extract", "dst")
+    assert_dirs_equal("input", "output", ignore_ns=True, ignore_xattrs=True)
+
+
+@requires_gnutar
+def test_import_concatenated_tar_without_ignore_zeros(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path, output_path = archiver.repository_location, archiver.input_path, archiver.output_path
+    create_test_files(input_path, create_hardlinks=False)  # hardlinks become separate files
+    os.unlink("input/flagfile")
+
+    with changedir("input"):
+        subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
+        subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
+        with open("concatenated.tar", "wb") as concatenated:
+            with open("file1.tar", "rb") as file1:
+                concatenated.write(file1.read())
+            with open("the_rest.tar", "rb") as the_rest:
+                concatenated.write(the_rest.read())
+            os.unlink("the_rest.tar")
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", "input/concatenated.tar")
+
+    with changedir(output_path):
+        cmd(archiver, f"--repo={repo_location}", "extract", "dst")
+
+    # Negative test -- assert that only file1 has been extracted, and the_rest has been ignored
+    # due to zero-filled block marker.
+    assert os.listdir("output") == ["file1"]
+
+
+def test_roundtrip_pax_borg(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path, output_path = archiver.repository_location, archiver.input_path, archiver.output_path
+    create_test_files(input_path)
+
+    cmd(archiver, f"--repo={repo_location}", "rcreate", "--encryption=none")
+    cmd(archiver, f"--repo={repo_location}", "create", "src", "input")
+    cmd(archiver, f"--repo={repo_location}", "export-tar", "src", "simple.tar", "--tar-format=BORG")
+    cmd(archiver, f"--repo={repo_location}", "import-tar", "dst", "simple.tar")
+
+    with changedir(output_path):
+        cmd(archiver, f"--repo={repo_location}", "extract", "dst")
+    assert_dirs_equal("input", "output/input")

+ 289 - 298
src/borg/testsuite/archiver/transfer_cmd.py

@@ -2,305 +2,296 @@ import json
 import os
 import stat
 import tarfile
-import unittest
+
+import pytest

 from ...constants import *  # NOQA
 from ...helpers.time import parse_timestamp
 from ..platform import is_win32
-from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
-
-
-class ArchiverTestCase(ArchiverTestCaseBase):
-    def test_transfer(self):
-        def check_repo(repo_option):
-            listing = self.cmd(repo_option, "rlist", "--short")
-            assert "arch1" in listing
-            assert "arch2" in listing
-            listing = self.cmd(repo_option, "list", "--short", "arch1")
-            assert "file1" in listing
-            assert "dir2/file2" in listing
-            self.cmd(repo_option, "check")
-
-        self.create_test_files()
-        repo1 = f"--repo={self.repository_location}1"
-        repo2 = f"--repo={self.repository_location}2"
-        other_repo1 = f"--other-repo={self.repository_location}1"
-
-        self.cmd(repo1, "rcreate", RK_ENCRYPTION)
-        self.cmd(repo1, "create", "arch1", "input")
-        self.cmd(repo1, "create", "arch2", "input")
-        check_repo(repo1)
-
-        self.cmd(repo2, "rcreate", RK_ENCRYPTION, other_repo1)
-        self.cmd(repo2, "transfer", other_repo1, "--dry-run")
-        self.cmd(repo2, "transfer", other_repo1)
-        self.cmd(repo2, "transfer", other_repo1, "--dry-run")
-        check_repo(repo2)
-
-    def test_transfer_upgrade(self):
-        # test upgrading a borg 1.2 repo to borg 2
-        # testing using json is a bit problematic because parseformat (used for json dumping)
-        # already tweaks the values a bit for better printability (like e.g. using the empty
-        # string for attributes that are not present).
-
-        # borg 1.2 repo dir contents, created by: scripts/make-testdata/test_transfer_upgrade.sh
-        repo12_tar = os.path.join(os.path.dirname(__file__), "repo12.tar.gz")
-        repo12_tzoffset = "+01:00"  # timezone used to create the repo/archives/json dumps inside the tar file
-
-        def convert_tz(local_naive, tzoffset, tzinfo):
-            # local_naive was meant to be in tzoffset timezone (e.g. "+01:00"),
-            # but we want it non-naive in tzinfo time zone (e.g. timezone.utc
-            # or None if local timezone is desired).
-            ts = parse_timestamp(local_naive + tzoffset)
-            return ts.astimezone(tzinfo).isoformat(timespec="microseconds")
-
-        dst_dir = f"{self.repository_location}1"
-        os.makedirs(dst_dir)
-        with tarfile.open(repo12_tar) as tf:
-            tf.extractall(dst_dir)
-
-        other_repo1 = f"--other-repo={self.repository_location}1"
-        repo2 = f"--repo={self.repository_location}2"
-
-        assert os.environ.get("BORG_PASSPHRASE") == "waytooeasyonlyfortests"
-        os.environ["BORG_TESTONLY_WEAKEN_KDF"] = "0"  # must use the strong kdf here or it can't decrypt the key
-
-        self.cmd(repo2, "rcreate", RK_ENCRYPTION, other_repo1)
-        self.cmd(repo2, "transfer", other_repo1, "--upgrader=From12To20")
-        self.cmd(repo2, "check")
-
-        # check list of archives / manifest
-        rlist_json = self.cmd(repo2, "rlist", "--json")
-        got = json.loads(rlist_json)
-        with open(os.path.join(dst_dir, "test_meta", "repo_list.json")) as f:
-            expected = json.load(f)
-        for key in "encryption", "repository":
-            # some stuff obviously needs to be different, remove that!
-            del got[key]
-            del expected[key]
-        assert len(got["archives"]) == len(expected["archives"])
-        for got_archive, expected_archive in zip(got["archives"], expected["archives"]):
-            del got_archive["id"]
-            del expected_archive["id"]
-            del expected_archive["barchive"]
-            # timestamps:
-            # borg 1.2 transformed to local time and had microseconds = 0, no tzoffset
-            # borg 2 uses local time, with microseconds and with tzoffset
-            for key in "start", "time":
-                # fix expectation: local time meant +01:00, so we convert that to whatever local tz is here.
-                expected_archive[key] = convert_tz(expected_archive[key], repo12_tzoffset, None)
-                # set microseconds to 0, so we can compare got with expected.
-                got_ts = parse_timestamp(got_archive[key])
-                got_archive[key] = got_ts.replace(microsecond=0).isoformat(timespec="microseconds")
-        assert got == expected
-
-        for archive in got["archives"]:
-            name = archive["name"]
-            # check archive contents
-            list_json = self.cmd(repo2, "list", "--json-lines", name)
-            got = [json.loads(line) for line in list_json.splitlines()]
-            with open(os.path.join(dst_dir, "test_meta", f"{name}_list.json")) as f:
-                lines = f.read()
-            expected = [json.loads(line) for line in lines.splitlines()]
-            hardlinks = {}
-            for g, e in zip(got, expected):
-                print(f"exp: {e}\ngot: {g}\n")
-
-                # borg 1.2 parseformat uses .get("bsdflags", 0) so the json has 0 even
-                # if there were no bsdflags stored in the item.
-                # borg 2 parseformat uses .get("bsdflags"), so the json has either an int
-                # (if the archived item has bsdflags) or None (if the item has no bsdflags).
-                if e["flags"] == 0 and g["flags"] is None:
-                    # this is expected behaviour, fix the expectation
-                    e["flags"] = None
-
-                # borg2 parseformat falls back to str(item.uid) if it does not have item.user,
-                # same for str(item.gid) and no item.group.
-                # so user/group are always str type, even if it is just str(uid) or str(gid).
-                # fix expectation (borg1 used int type for user/group in that case):
-                if g["user"] == str(g["uid"]) == str(e["uid"]):
-                    e["user"] = str(e["uid"])
-                if g["group"] == str(g["gid"]) == str(e["gid"]):
-                    e["group"] = str(e["gid"])
-
-                for key in "mtime", "ctime", "atime":
-                    if key in e:
-                        e[key] = convert_tz(e[key], repo12_tzoffset, None)
-
-                # borg 1 used hardlink slaves linking back to their hardlink masters.
-                # borg 2 uses symmetric approach: just normal items. if they are hardlinks,
-                # each item has normal attributes, including the chunks list, size. additionally,
-                # they have a hlid and same hlid means same inode / belonging to same set of hardlinks.
-                hardlink = bool(g.get("hlid"))  # note: json has "" as hlid if there is no hlid in the item
-                if hardlink:
-                    hardlinks[g["path"]] = g["hlid"]
-                    if e["mode"].startswith("h"):
-                        # fix expectations: borg1 signalled a hardlink slave with "h"
-                        # borg2 treats all hardlinks symmetrically as normal files
-                        e["mode"] = g["mode"][0] + e["mode"][1:]
-                        # borg1 used source/linktarget to link back to hardlink master
-                        assert e["source"] != ""
-                        assert e["linktarget"] != ""
-                        # fix expectations: borg2 does not use source/linktarget any more for hardlinks
-                        e["source"] = ""
-                        e["linktarget"] = ""
-                        # borg 1 has size == 0 for hardlink slaves, borg 2 has the real file size
-                        assert e["size"] == 0
-                        assert g["size"] >= 0
-                        # fix expectation for size
-                        e["size"] = g["size"]
-                    # Note: size == 0 for all items without a size or chunks list (like e.g. directories)
-                    # Note: healthy == True indicates the *absence* of the additional chunks_healthy list
-                del g["hlid"]
-
-                # borg 1 used "linktarget" and "source" for links, borg 2 uses "target" for symlinks.
-                if g["target"] == e["linktarget"]:
-                    e["target"] = e["linktarget"]
-                    del e["linktarget"]
-                    del e["source"]
-
-                if e["type"] == "b" and is_win32:
-                    # The S_IFBLK macro is broken on MINGW
-                    del e["type"], g["type"]
-                    del e["mode"], g["mode"]
-                assert g == e
-
-            if name == "archive1":
-                # hardlinks referring to same inode have same hlid
-                assert hardlinks["tmp/borgtest/hardlink1"] == hardlinks["tmp/borgtest/hardlink2"]
-
-        self.repository_path = f"{self.repository_location}2"
-        for archive_name in ("archive1", "archive2"):
-            archive, repository = self.open_archive(archive_name)
-            with repository:
-                for item in archive.iter_items():
-                    print(item)
-
-                    # borg1 used to store some stuff with None values
-                    # borg2 does just not have the key if the value is not known.
-                    item_dict = item.as_dict()
-                    assert not any(value is None for value in item_dict.values()), f"found None value in {item_dict}"
-
-                    # with borg2, all items with chunks must have a precomputed size
-                    assert "chunks" not in item or "size" in item and item.size >= 0
-
-                    if item.path.endswith("directory") or item.path.endswith("borgtest"):
-                        assert stat.S_ISDIR(item.mode)
-                        assert item.uid > 0
-                        assert "hlid" not in item
-                    elif item.path.endswith("no_hardlink") or item.path.endswith("target"):
-                        assert stat.S_ISREG(item.mode)
-                        assert item.uid > 0
-                        assert "hlid" not in item
-                        assert len(item.chunks) > 0
-                        assert "bsdflags" not in item
-                    elif item.path.endswith("hardlink1"):
-                        assert stat.S_ISREG(item.mode)
-                        assert item.uid > 0
-                        assert "hlid" in item and len(item.hlid) == 32  # 256bit
-                        hlid1 = item.hlid
-                        assert len(item.chunks) > 0
-                        chunks1 = item.chunks
-                        size1 = item.size
-                        assert "source" not in item
-                        assert "target" not in item
-                        assert "hardlink_master" not in item
-                    elif item.path.endswith("hardlink2"):
-                        assert stat.S_ISREG(item.mode)
-                        assert item.uid > 0
-                        assert "hlid" in item and len(item.hlid) == 32  # 256bit
-                        hlid2 = item.hlid
-                        assert len(item.chunks) > 0
-                        chunks2 = item.chunks
-                        size2 = item.size
-                        assert "source" not in item
-                        assert "target" not in item
-                        assert "hardlink_master" not in item
-                    elif item.path.endswith("broken_symlink"):
-                        assert stat.S_ISLNK(item.mode)
-                        assert item.target == "doesnotexist"
-                        assert item.uid > 0
-                        assert "hlid" not in item
-                    elif item.path.endswith("symlink"):
-                        assert stat.S_ISLNK(item.mode)
-                        assert item.target == "target"
-                        assert item.uid > 0
-                        assert "hlid" not in item
-                    elif item.path.endswith("fifo"):
-                        assert stat.S_ISFIFO(item.mode)
-                        assert item.uid > 0
-                        assert "hlid" not in item
-                    elif item.path.endswith("without_xattrs"):
-                        assert stat.S_ISREG(item.mode)
-                        assert "xattrs" not in item
-                    elif item.path.endswith("with_xattrs"):
-                        assert stat.S_ISREG(item.mode)
-                        assert "xattrs" in item
-                        assert len(item.xattrs) == 2
-                        assert item.xattrs[b"key1"] == b"value"
-                        assert item.xattrs[b"key2"] == b""
-                    elif item.path.endswith("without_flags"):
-                        assert stat.S_ISREG(item.mode)
-                        # borg1 did not store a flags value of 0 ("nothing special")
-                        # borg2 reflects this "i do not know" by not having the k/v pair
-                        assert "bsdflags" not in item
-                    elif item.path.endswith("with_flags"):
-                        assert stat.S_ISREG(item.mode)
-                        assert "bsdflags" in item
-                        assert item.bsdflags == stat.UF_NODUMP
-                    elif item.path.endswith("root_stuff"):
-                        assert stat.S_ISDIR(item.mode)
-                        assert item.uid == 0
-                        assert item.gid == 0
-                        assert "hlid" not in item
-                    elif item.path.endswith("cdev_34_56"):
-                        assert stat.S_ISCHR(item.mode)
-                        # looks like we can't use os.major/minor with data coming from another platform,
-                        # thus we only do a rather rough check here:
-                        assert "rdev" in item and item.rdev != 0
-                        assert item.uid == 0
-                        assert item.gid == 0
-                        assert item.user == "root"
-                        assert item.group in ("root", "wheel")
-                        assert "hlid" not in item
-                    elif item.path.endswith("bdev_12_34"):
-                        if not is_win32:
-                            # The S_IFBLK macro is broken on MINGW
-                            assert stat.S_ISBLK(item.mode)
-                        # looks like we can't use os.major/minor with data coming from another platform,
-                        # thus we only do a rather rough check here:
-                        assert "rdev" in item and item.rdev != 0
-                        assert item.uid == 0
-                        assert item.gid == 0
-                        assert item.user == "root"
-                        assert item.group in ("root", "wheel")
-                        assert "hlid" not in item
-                    elif item.path.endswith("strange_uid_gid"):
-                        assert stat.S_ISREG(item.mode)
-                        assert item.uid == 54321
-                        assert item.gid == 54321
-                        assert "user" not in item
-                        assert "group" not in item
-                    else:
-                        raise NotImplementedError(f"test missing for {item.path}")
-            if archive_name == "archive1":
-                assert hlid1 == hlid2
-                assert size1 == size2 == 16 + 1  # 16 text chars + \n
-                assert chunks1 == chunks2
-
-
-class RemoteArchiverTestCase(RemoteArchiverTestCaseBase, ArchiverTestCase):
-    """run the same tests, but with a remote repository"""
-
-    @unittest.skip("only works locally")
-    def test_transfer_upgrade(self):
-        pass
-
-
-@unittest.skipUnless("binary" in BORG_EXES, "no borg.exe available")
-class ArchiverTestCaseBinary(ArchiverTestCaseBinaryBase, ArchiverTestCase):
-    """runs the same tests, but via the borg binary"""
-
-    @unittest.skip("only works locally")
-    def test_transfer_upgrade(self):
-        pass
+from . import cmd, create_test_files, RK_ENCRYPTION, open_archive, generate_archiver_tests
+
+pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary")  # NOQA
+
+
+def test_transfer(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    repo_location, input_path = archiver.repository_location, archiver.input_path
+
+    def check_repo(repo_option):
+        listing = cmd(archiver, repo_option, "rlist", "--short")
+        assert "arch1" in listing
+        assert "arch2" in listing
+        listing = cmd(archiver, repo_option, "list", "--short", "arch1")
+        assert "file1" in listing
+        assert "dir2/file2" in listing
+        cmd(archiver, repo_option, "check")
+
+    create_test_files(input_path)
+    repo1 = f"--repo={repo_location}1"
+    repo2 = f"--repo={repo_location}2"
+    other_repo1 = f"--other-repo={repo_location}1"
+
+    cmd(archiver, repo1, "rcreate", RK_ENCRYPTION)
+    cmd(archiver, repo1, "create", "arch1", "input")
+    cmd(archiver, repo1, "create", "arch2", "input")
+    check_repo(repo1)
+
+    cmd(archiver, repo2, "rcreate", RK_ENCRYPTION, other_repo1)
+    cmd(archiver, repo2, "transfer", other_repo1, "--dry-run")
+    cmd(archiver, repo2, "transfer", other_repo1)
+    cmd(archiver, repo2, "transfer", other_repo1, "--dry-run")
+    check_repo(repo2)
+
+
+def test_transfer_upgrade(archivers, request):
+    archiver = request.getfixturevalue(archivers)
+    if archiver.get_kind() in ["remote", "binary"]:
+        pytest.skip("only works locally")
+    repo_location = archiver.repository_location
+
+    # test upgrading a borg 1.2 repo to borg 2
+    # testing using json is a bit problematic because parseformat (used for json dumping)
+    # already tweaks the values a bit for better printability (like e.g. using the empty
+    # string for attributes that are not present).
+    # borg 1.2 repo dir contents, created by: scripts/make-testdata/test_transfer_upgrade.sh
+    repo12_tar = os.path.join(os.path.dirname(__file__), "repo12.tar.gz")
+    repo12_tzoffset = "+01:00"  # timezone used to create the repo/archives/json dumps inside the tar file
+
+    def convert_tz(local_naive, tzoffset, tzinfo):
+        # local_naive was meant to be in tzoffset timezone (e.g. "+01:00"),
+        # but we want it non-naive in tzinfo time zone (e.g. timezone.utc
+        # or None if local timezone is desired).
+        ts = parse_timestamp(local_naive + tzoffset)
+        return ts.astimezone(tzinfo).isoformat(timespec="microseconds")
+
+    dst_dir = f"{repo_location}1"
+    os.makedirs(dst_dir)
+    with tarfile.open(repo12_tar) as tf:
+        tf.extractall(dst_dir)
+
+    other_repo1 = f"--other-repo={repo_location}1"
+    repo2 = f"--repo={repo_location}2"
+
+    assert os.environ.get("BORG_PASSPHRASE") == "waytooeasyonlyfortests"
+    os.environ["BORG_TESTONLY_WEAKEN_KDF"] = "0"  # must use the strong kdf here or it can't decrypt the key
+
+    cmd(archiver, repo2, "rcreate", RK_ENCRYPTION, other_repo1)
+    cmd(archiver, repo2, "transfer", other_repo1, "--upgrader=From12To20")
+    cmd(archiver, repo2, "check")
+
+    # check list of archives / manifest
+    rlist_json = cmd(archiver, repo2, "rlist", "--json")
+    got = json.loads(rlist_json)
+    with open(os.path.join(dst_dir, "test_meta", "repo_list.json")) as f:
+        expected = json.load(f)
+
+    for key in "encryption", "repository":
+        # some stuff obviously needs to be different, remove that!
+        del got[key]
+        del expected[key]
+    assert len(got["archives"]) == len(expected["archives"])
+
+    for got_archive, expected_archive in zip(got["archives"], expected["archives"]):
+        del got_archive["id"]
+        del expected_archive["id"]
+        del expected_archive["barchive"]
+        # timestamps:
+        # borg 1.2 transformed to local time and had microseconds = 0, no tzoffset
+        # borg 2 uses local time, with microseconds and with tzoffset
+        for key in "start", "time":
+            # fix expectation: local time meant +01:00, so we convert that to whatever local tz is here.
+            expected_archive[key] = convert_tz(expected_archive[key], repo12_tzoffset, None)
+            # set microseconds to 0, so we can compare got with expected.
+            got_ts = parse_timestamp(got_archive[key])
+            got_archive[key] = got_ts.replace(microsecond=0).isoformat(timespec="microseconds")
+    assert got == expected
+
+    for archive in got["archives"]:
+        name = archive["name"]
+        # check archive contents
+        list_json = cmd(archiver, repo2, "list", "--json-lines", name)
+        got = [json.loads(line) for line in list_json.splitlines()]
+        with open(os.path.join(dst_dir, "test_meta", f"{name}_list.json")) as f:
+            lines = f.read()
+        expected = [json.loads(line) for line in lines.splitlines()]
+        hardlinks = {}
+        for g, e in zip(got, expected):
+            # borg 1.2 parseformat uses .get("bsdflags", 0) so the json has 0 even
+            # if there were no bsdflags stored in the item.
+            # borg 2 parseformat uses .get("bsdflags"), so the json has either an int
+            # (if the archived item has bsdflags) or None (if the item has no bsdflags).
+            if e["flags"] == 0 and g["flags"] is None:
+                # this is expected behaviour, fix the expectation
+                e["flags"] = None
+
+            # borg2 parseformat falls back to str(item.uid) if it does not have item.user,
+            # same for str(item.gid) and no item.group.
+            # so user/group are always str type, even if it is just str(uid) or str(gid).
+            # fix expectation (borg1 used int type for user/group in that case):
+            if g["user"] == str(g["uid"]) == str(e["uid"]):
+                e["user"] = str(e["uid"])
+            if g["group"] == str(g["gid"]) == str(e["gid"]):
+                e["group"] = str(e["gid"])
+
+            for key in "mtime", "ctime", "atime":
+                if key in e:
+                    e[key] = convert_tz(e[key], repo12_tzoffset, None)
+
+            # borg 1 used hardlink slaves linking back to their hardlink masters.
+            # borg 2 uses symmetric approach: just normal items. if they are hardlinks,
+            # each item has normal attributes, including the chunks list, size. additionally,
+            # they have a hlid and same hlid means same inode / belonging to same set of hardlinks.
+            hardlink = bool(g.get("hlid"))  # note: json has "" as hlid if there is no hlid in the item
+            if hardlink:
+                hardlinks[g["path"]] = g["hlid"]
+                if e["mode"].startswith("h"):
+                    # fix expectations: borg1 signalled a hardlink slave with "h"
+                    # borg2 treats all hardlinks symmetrically as normal files
+                    e["mode"] = g["mode"][0] + e["mode"][1:]
+                    # borg1 used source/linktarget to link back to hardlink master
+                    assert e["source"] != ""
+                    assert e["linktarget"] != ""
+                    # fix expectations: borg2 does not use source/linktarget any more for hardlinks
+                    e["source"] = ""
+                    e["linktarget"] = ""
+                    # borg 1 has size == 0 for hardlink slaves, borg 2 has the real file size
+                    assert e["size"] == 0
+                    assert g["size"] >= 0
+                    # fix expectation for size
+                    e["size"] = g["size"]
+                # Note: size == 0 for all items without a size or chunks list (like e.g. directories)
+                # Note: healthy == True indicates the *absence* of the additional chunks_healthy list
+            del g["hlid"]
+
+            # borg 1 used "linktarget" and "source" for links, borg 2 uses "target" for symlinks.
+            if g["target"] == e["linktarget"]:
+                e["target"] = e["linktarget"]
+                del e["linktarget"]
+                del e["source"]
+
+            if e["type"] == "b" and is_win32:
+                # The S_IFBLK macro is broken on MINGW
+                del e["type"], g["type"]
+                del e["mode"], g["mode"]
+            assert g == e
+
+        if name == "archive1":
+            # hardlinks referring to same inode have same hlid
+            assert hardlinks["tmp/borgtest/hardlink1"] == hardlinks["tmp/borgtest/hardlink2"]
+
+    repo_path = f"{repo_location}2"
+    for archive_name in ("archive1", "archive2"):
+        archive, repository = open_archive(repo_path, archive_name)
+        with repository:
+            for item in archive.iter_items():
+                # borg1 used to store some stuff with None values
+                # borg2 does just not have the key if the value is not known.
+                item_dict = item.as_dict()
+                assert not any(value is None for value in item_dict.values()), f"found None value in {item_dict}"
+
+                # with borg2, all items with chunks must have a precomputed size
+                assert "chunks" not in item or "size" in item and item.size >= 0
+
+                if item.path.endswith("directory") or item.path.endswith("borgtest"):
+                    assert stat.S_ISDIR(item.mode)
+                    assert item.uid > 0
+                    assert "hlid" not in item
+                elif item.path.endswith("no_hardlink") or item.path.endswith("target"):
+                    assert stat.S_ISREG(item.mode)
+                    assert item.uid > 0
+                    assert "hlid" not in item
+                    assert len(item.chunks) > 0
+                    assert "bsdflags" not in item
+                elif item.path.endswith("hardlink1"):
+                    assert stat.S_ISREG(item.mode)
+                    assert item.uid > 0
+                    assert "hlid" in item and len(item.hlid) == 32  # 256bit
+                    hlid1 = item.hlid
+                    assert len(item.chunks) > 0
+                    chunks1 = item.chunks
+                    size1 = item.size
+                    assert "source" not in item
+                    assert "target" not in item
+                    assert "hardlink_master" not in item
+                elif item.path.endswith("hardlink2"):
+                    assert stat.S_ISREG(item.mode)
+                    assert item.uid > 0
+                    assert "hlid" in item and len(item.hlid) == 32  # 256bit
+                    hlid2 = item.hlid
+                    assert len(item.chunks) > 0
+                    chunks2 = item.chunks
+                    size2 = item.size
+                    assert "source" not in item
+                    assert "target" not in item
+                    assert "hardlink_master" not in item
+                elif item.path.endswith("broken_symlink"):
+                    assert stat.S_ISLNK(item.mode)
+                    assert item.target == "doesnotexist"
+                    assert item.uid > 0
+                    assert "hlid" not in item
+                elif item.path.endswith("symlink"):
+                    assert stat.S_ISLNK(item.mode)
+                    assert item.target == "target"
+                    assert item.uid > 0
+                    assert "hlid" not in item
+                elif item.path.endswith("fifo"):
+                    assert stat.S_ISFIFO(item.mode)
+                    assert item.uid > 0
+                    assert "hlid" not in item
+                elif item.path.endswith("without_xattrs"):
+                    assert stat.S_ISREG(item.mode)
+                    assert "xattrs" not in item
+                elif item.path.endswith("with_xattrs"):
+                    assert stat.S_ISREG(item.mode)
+                    assert "xattrs" in item
+                    assert len(item.xattrs) == 2
+                    assert item.xattrs[b"key1"] == b"value"
+                    assert item.xattrs[b"key2"] == b""
+                elif item.path.endswith("without_flags"):
+                    assert stat.S_ISREG(item.mode)
+                    # borg1 did not store a flags value of 0 ("nothing special")
+                    # borg2 reflects this "I do not know" by not having the k/v pair
+                    assert "bsdflags" not in item
+                elif item.path.endswith("with_flags"):
+                    assert stat.S_ISREG(item.mode)
+                    assert "bsdflags" in item
+                    assert item.bsdflags == stat.UF_NODUMP
+                elif item.path.endswith("root_stuff"):
+                    assert stat.S_ISDIR(item.mode)
+                    assert item.uid == 0
+                    assert item.gid == 0
+                    assert "hlid" not in item
+                elif item.path.endswith("cdev_34_56"):
+                    assert stat.S_ISCHR(item.mode)
+                    # looks like we can't use os.major/minor with data coming from another platform,
+                    # thus we only do a rather rough check here:
+                    assert "rdev" in item and item.rdev != 0
+                    assert item.uid == 0
+                    assert item.gid == 0
+                    assert item.user == "root"
+                    assert item.group in ("root", "wheel")
+                    assert "hlid" not in item
+                elif item.path.endswith("bdev_12_34"):
+                    if not is_win32:
+                        # The S_IFBLK macro is broken on MINGW
+                        assert stat.S_ISBLK(item.mode)
+                    # looks like we can't use os.major/minor with data coming from another platform,
+                    # thus we only do a rather rough check here:
+                    assert "rdev" in item and item.rdev != 0
+                    assert item.uid == 0
+                    assert item.gid == 0
+                    assert item.user == "root"
+                    assert item.group in ("root", "wheel")
+                    assert "hlid" not in item
+                elif item.path.endswith("strange_uid_gid"):
+                    assert stat.S_ISREG(item.mode)
+                    assert item.uid == 54321
+                    assert item.gid == 54321
+                    assert "user" not in item
+                    assert "group" not in item
+                else:
+                    raise NotImplementedError(f"test missing for {item.path}")
+        if archive_name == "archive1":
+            assert hlid1 == hlid2
+            assert size1 == size2 == 16 + 1  # 16 text chars + \n
+            assert chunks1 == chunks2

+ 25 - 21
src/borg/testsuite/benchmark.py

@@ -10,7 +10,7 @@ import os
 
 
 import pytest

-from .archiver import changedir, cmd
+from .archiver import changedir, cmd_fixture
 from .item import Item
 from ..constants import zeros

@@ -28,8 +28,8 @@ def repo_url(request, tmpdir, monkeypatch):


 @pytest.fixture(params=["none", "repokey-aes-ocb"])
-def repo(request, cmd, repo_url):
-    cmd(f"--repo={repo_url}", "rcreate", "--encryption", request.param)
+def repo(request, cmd_fixture, repo_url):
+    cmd_fixture(f"--repo={repo_url}", "rcreate", "--encryption", request.param)
     return repo_url


@@ -59,55 +59,59 @@ def testdata(request, tmpdir_factory):


 @pytest.fixture(params=["none", "lz4"])
-def repo_archive(request, cmd, repo, testdata):
+def repo_archive(request, cmd_fixture, repo, testdata):
     archive = "test"
-    cmd(f"--repo={repo}", "create", "--compression", request.param, archive, testdata)
+    cmd_fixture(f"--repo={repo}", "create", "--compression", request.param, archive, testdata)
     return repo, archive


-def test_create_none(benchmark, cmd, repo, testdata):
-    result, out = benchmark.pedantic(cmd, (f"--repo={repo}", "create", "--compression", "none", "test", testdata))
+def test_create_none(benchmark, cmd_fixture, repo, testdata):
+    result, out = benchmark.pedantic(
+        cmd_fixture, (f"--repo={repo}", "create", "--compression", "none", "test", testdata)
+    )
     assert result == 0


-def test_create_lz4(benchmark, cmd, repo, testdata):
-    result, out = benchmark.pedantic(cmd, (f"--repo={repo}", "create", "--compression", "lz4", "test", testdata))
+def test_create_lz4(benchmark, cmd_fixture, repo, testdata):
+    result, out = benchmark.pedantic(
+        cmd_fixture, (f"--repo={repo}", "create", "--compression", "lz4", "test", testdata)
+    )
     assert result == 0


-def test_extract(benchmark, cmd, repo_archive, tmpdir):
+def test_extract(benchmark, cmd_fixture, repo_archive, tmpdir):
     repo, archive = repo_archive
     with changedir(str(tmpdir)):
-        result, out = benchmark.pedantic(cmd, (f"--repo={repo}", "extract", archive))
+        result, out = benchmark.pedantic(cmd_fixture, (f"--repo={repo}", "extract", archive))
     assert result == 0


-def test_delete(benchmark, cmd, repo_archive):
+def test_delete(benchmark, cmd_fixture, repo_archive):
     repo, archive = repo_archive
-    result, out = benchmark.pedantic(cmd, (f"--repo={repo}", "delete", "-a", archive))
+    result, out = benchmark.pedantic(cmd_fixture, (f"--repo={repo}", "delete", "-a", archive))
     assert result == 0


-def test_list(benchmark, cmd, repo_archive):
+def test_list(benchmark, cmd_fixture, repo_archive):
     repo, archive = repo_archive
-    result, out = benchmark(cmd, f"--repo={repo}", "list", archive)
+    result, out = benchmark(cmd_fixture, f"--repo={repo}", "list", archive)
     assert result == 0


-def test_info(benchmark, cmd, repo_archive):
+def test_info(benchmark, cmd_fixture, repo_archive):
     repo, archive = repo_archive
-    result, out = benchmark(cmd, f"--repo={repo}", "info", "-a", archive)
+    result, out = benchmark(cmd_fixture, f"--repo={repo}", "info", "-a", archive)
     assert result == 0


-def test_check(benchmark, cmd, repo_archive):
+def test_check(benchmark, cmd_fixture, repo_archive):
     repo, archive = repo_archive
-    result, out = benchmark(cmd, f"--repo={repo}", "check")
+    result, out = benchmark(cmd_fixture, f"--repo={repo}", "check")
     assert result == 0


-def test_help(benchmark, cmd):
-    result, out = benchmark(cmd, "help")
+def test_help(benchmark, cmd_fixture):
+    result, out = benchmark(cmd_fixture, "help")
     assert result == 0
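+# note: renamed from "cmd", since a fixture named "cmd" would shadow the cmd() helper
+# function that the converted archiver tests import.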