瀏覽代碼

upgrade: remove the "attic backup" repo upgrader and tests

attic is borg's parent project, but it stalled in 2015 and has not been updated since then.

we can assume that most attic users have noticed this in the meantime and have already
converted their repos to borg.

if some did not yet, they are advised to use borg < 1.3 to do that ASAP.

note: borg can still DETECT an attic repo by recognizing its ATTIC_MAGIC value
      and then gives exactly that advice.
Thomas Waldmann 3 年之前
父節點
當前提交
c7b1cd56d8
共有 6 個文件被更改,包括 10 次插入和 548 次刪除
  1. 7 47
      src/borg/archiver.py
  2. 1 1
      src/borg/repository.py
  3. 0 21
      src/borg/testsuite/archiver.py
  4. 二進制
      src/borg/testsuite/attic.tar.gz
  5. 0 207
      src/borg/testsuite/upgrader.py
  6. 2 272
      src/borg/upgrader.py

+ 7 - 47
src/borg/archiver.py

@@ -86,7 +86,7 @@ try:
     from .remote import RepositoryServer, RemoteRepository, cache_if_remote
     from .repository import Repository, LIST_SCAN_LIMIT, TAG_PUT, TAG_DELETE, TAG_COMMIT
     from .selftest import selftest
-    from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
+    from .upgrader import BorgRepositoryUpgrader
 except BaseException:
     # an unhandled exception in the try-block would cause the borg cli command to exit with rc 1 due to python's
     # default behavior, see issue #4424.
@@ -1773,14 +1773,7 @@ class Archiver:
             manifest.write()
             repository.commit(compact=False)
         else:
-            # mainly for upgrades from Attic repositories,
-            # but also supports borg 0.xx -> 1.0 upgrade.
-
-            repo = AtticRepositoryUpgrader(args.location.path, create=False)
-            try:
-                repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
-            except NotImplementedError as e:
-                print("warning: %s" % e)
+            # mainly for upgrades from borg 0.xx -> 1.0.
             repo = BorgRepositoryUpgrader(args.location.path, create=False)
             try:
                 repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
@@ -4842,50 +4835,17 @@ class Archiver:
         https://borgbackup.readthedocs.io/en/stable/changes.html#pre-1-0-9-manifest-spoofing-vulnerability
         for details.
 
-        Attic and Borg 0.xx to Borg 1.x
-        +++++++++++++++++++++++++++++++
+        Borg 0.xx to Borg 1.x
+        +++++++++++++++++++++
 
-        This currently supports converting an Attic repository to Borg and also
-        helps with converting Borg 0.xx to 1.0.
+        This currently supports converting Borg 0.xx to 1.0.
 
         Currently, only LOCAL repositories can be upgraded (issue #465).
 
         Please note that ``borg create`` (since 1.0.0) uses bigger chunks by
-        default than old borg or attic did, so the new chunks won't deduplicate
+        default than old borg did, so the new chunks won't deduplicate
         with the old chunks in the upgraded repository.
-        See ``--chunker-params`` option of ``borg create`` and ``borg recreate``.
-
-        ``borg upgrade`` will change the magic strings in the repository's
-        segments to match the new Borg magic strings. The keyfiles found in
-        $ATTIC_KEYS_DIR or ~/.attic/keys/ will also be converted and
-        copied to $BORG_KEYS_DIR or ~/.config/borg/keys.
-
-        The cache files are converted, from $ATTIC_CACHE_DIR or
-        ~/.cache/attic to $BORG_CACHE_DIR or ~/.cache/borg, but the
-        cache layout between Borg and Attic changed, so it is possible
-        the first backup after the conversion takes longer than expected
-        due to the cache resync.
-
-        Upgrade should be able to resume if interrupted, although it
-        will still iterate over all segments. If you want to start
-        from scratch, use `borg delete` over the copied repository to
-        make sure the cache files are also removed::
-
-            borg delete borg
-
-        Unless ``--inplace`` is specified, the upgrade process first creates a backup
-        copy of the repository, in REPOSITORY.before-upgrade-DATETIME, using hardlinks.
-        This requires that the repository and its parent directory reside on same
-        filesystem so the hardlink copy can work.
-        This takes longer than in place upgrades, but is much safer and gives
-        progress information (as opposed to ``cp -al``). Once you are satisfied
-        with the conversion, you can safely destroy the backup copy.
-
-        WARNING: Running the upgrade in place will make the current
-        copy unusable with older version, with no way of going back
-        to previous versions. This can PERMANENTLY DAMAGE YOUR
-        REPOSITORY!  Attic CAN NOT READ BORG REPOSITORIES, as the
-        magic strings have changed. You have been warned.""")
+        See ``--chunker-params`` option of ``borg create`` and ``borg recreate``.""")
         subparser = subparsers.add_parser('upgrade', parents=[common_parser], add_help=False,
                                           description=self.do_upgrade.__doc__,
                                           epilog=upgrade_epilog,

+ 1 - 1
src/borg/repository.py

@@ -139,7 +139,7 @@ class Repository:
         """{} does not have a valid configuration. Check repo config [{}]."""
 
     class AtticRepository(Error):
-        """Attic repository detected. Please run "borg upgrade {}"."""
+        """Attic repository detected. Please use borg < 1.3 to run "borg upgrade {}"."""
 
     class CheckNeeded(ErrorWithTraceback):
         """Inconsistency detected. Please run "borg check {}"."""

+ 0 - 21
src/borg/testsuite/archiver.py

@@ -57,7 +57,6 @@ from . import has_lchflags, llfuse
 from . import BaseTestCase, changedir, environment_variable, no_selinux
 from . import are_symlinks_supported, are_hardlinks_supported, are_fifos_supported, is_utime_fully_supported, is_birthtime_fully_supported
 from .platform import fakeroot_detected
-from .upgrader import make_attic_repo
 from . import key
 
 
@@ -3497,26 +3496,6 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
             self.cmd('extract', self.repository_location + '::dst')
         self.assert_dirs_equal('input', 'output/input', ignore_ns=True, ignore_xattrs=True)
 
-    def test_detect_attic_repo(self):
-        path = make_attic_repo(self.repository_path)
-        cmds = [
-            ['create', path + '::test', self.tmpdir],
-            ['extract', path + '::test'],
-            ['check', path],
-            ['rename', path + '::test', 'newname'],
-            ['list', path],
-            ['delete', path],
-            ['prune', path],
-            ['info', path + '::test'],
-            ['key', 'export', path, 'exported'],
-            ['key', 'import', path, 'import'],
-            ['key', 'change-passphrase', path],
-            ['break-lock', path],
-        ]
-        for args in cmds:
-            output = self.cmd(*args, fork=True, exit_code=2)
-            assert 'Attic repository detected.' in output
-
     # derived from test_extract_xattrs_errors()
     @pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason='xattr not supported on this system or on this version of'
                                                          'fakeroot')

二進制
src/borg/testsuite/attic.tar.gz


+ 0 - 207
src/borg/testsuite/upgrader.py

@@ -1,207 +0,0 @@
-import os
-import tarfile
-
-import pytest
-
-from ..constants import *  # NOQA
-from ..crypto.key import KeyfileKey
-from ..upgrader import AtticRepositoryUpgrader, AtticKeyfileKey
-from ..helpers import get_keys_dir
-from ..repository import Repository
-from . import are_hardlinks_supported
-
-
-# tar with a repo and repo keyfile from attic
-ATTIC_TAR = os.path.join(os.path.dirname(__file__), 'attic.tar.gz')
-
-
-def untar(tarfname, path, what):
-    """
-    extract <tarfname> tar archive to <path>, all stuff starting with <what>.
-
-    return path to <what>.
-    """
-
-    def files(members):
-        for tarinfo in members:
-            if tarinfo.name.startswith(what):
-                yield tarinfo
-
-    with tarfile.open(tarfname, 'r') as tf:
-        tf.extractall(path, members=files(tf))
-
-    return os.path.join(path, what)
-
-
-def repo_valid(path):
-    """
-    utility function to check if borg can open a repository
-
-    :param path: the path to the repository
-    :returns: if borg can check the repository
-    """
-    with Repository(str(path), exclusive=True, create=False) as repository:
-        # can't check raises() because check() handles the error
-        return repository.check()
-
-
-def key_valid(path):
-    """
-    check that the new keyfile is alright
-
-    :param path: the path to the key file
-    :returns: if the file starts with the borg magic string
-    """
-    keyfile = os.path.join(get_keys_dir(),
-                           os.path.basename(path))
-    with open(keyfile) as f:
-        return f.read().startswith(KeyfileKey.FILE_ID)
-
-
-def make_attic_repo(dir):
-    """
-    create an attic repo with some stuff in it
-
-    :param dir: path to the repository to be created
-    :returns: path to attic repository
-    """
-    # there is some stuff in that repo, copied from `RepositoryTestCase.test1`
-    return untar(ATTIC_TAR, str(dir), 'repo')
-
-
-@pytest.fixture()
-def attic_repo(tmpdir):
-    return make_attic_repo(tmpdir)
-
-
-@pytest.fixture(params=[True, False])
-def inplace(request):
-    return request.param
-
-
-def test_convert_segments(attic_repo, inplace):
-    """test segment conversion
-
-    this will load the given attic repository, list all the segments
-    then convert them one at a time. we need to close the repo before
-    conversion otherwise we have errors from borg
-
-    :param attic_repo: a populated attic repository (fixture)
-    """
-    repo_path = attic_repo
-    with pytest.raises(Repository.AtticRepository):
-        repo_valid(repo_path)
-    repository = AtticRepositoryUpgrader(repo_path, create=False)
-    with repository:
-        segments = [filename for i, filename in repository.io.segment_iterator()]
-    repository.convert_segments(segments, dryrun=False, inplace=inplace)
-    repository.convert_cache(dryrun=False)
-    assert repo_valid(repo_path)
-
-
-@pytest.fixture()
-def attic_key_file(tmpdir, monkeypatch):
-    """
-    create an attic key file from the given repo, in the keys
-    subdirectory of the given tmpdir
-
-    :param tmpdir: a temporary directory (a builtin fixture)
-    :returns: path to key file
-    """
-    keys_dir = untar(ATTIC_TAR, str(tmpdir), 'keys')
-
-    # we use the repo dir for the created keyfile, because we do
-    # not want to clutter existing keyfiles
-    monkeypatch.setenv('ATTIC_KEYS_DIR', keys_dir)
-
-    # we use the same directory for the converted files, which
-    # will clutter the previously created one, which we don't care
-    # about anyways. in real runs, the original key will be retained.
-    monkeypatch.setenv('BORG_KEYS_DIR', keys_dir)
-    monkeypatch.setenv('ATTIC_PASSPHRASE', 'test')
-
-    return os.path.join(keys_dir, 'repo')
-
-
-def test_keys(attic_repo, attic_key_file):
-    """test key conversion
-
-    test that we can convert the given key to a properly formatted
-    borg key. assumes that the ATTIC_KEYS_DIR and BORG_KEYS_DIR have
-    been properly populated by the attic_key_file fixture.
-
-    :param attic_repo: path to an attic repository (fixture defined above)
-    :param attic_key_file: path to an attic key file (fixture defined above)
-    """
-    keyfile_path = attic_key_file
-    assert not key_valid(keyfile_path)  # not upgraded yet
-    with AtticRepositoryUpgrader(attic_repo, create=False) as repository:
-        keyfile = AtticKeyfileKey.find_key_file(repository)
-        AtticRepositoryUpgrader.convert_keyfiles(keyfile, dryrun=False)
-    assert key_valid(keyfile_path)
-
-
-@pytest.mark.skipif(not are_hardlinks_supported(), reason='hardlinks not supported')
-def test_convert_all(attic_repo, attic_key_file, inplace):
-    """test all conversion steps
-
-    this runs everything. mostly redundant test, since everything is
-    done above. yet we expect a NotImplementedError because we do not
-    convert caches yet.
-
-    :param attic_repo: path to an attic repository (fixture defined above)
-    :param attic_key_file: path to an attic key file (fixture defined above)
-    """
-    repo_path = attic_repo
-
-    with pytest.raises(Repository.AtticRepository):
-        repo_valid(repo_path)
-
-    def stat_segment(path):
-        return os.stat(os.path.join(path, 'data', '0', '0'))
-
-    def first_inode(path):
-        return stat_segment(path).st_ino
-
-    orig_inode = first_inode(repo_path)
-    with AtticRepositoryUpgrader(repo_path, create=False) as repository:
-        # replicate command dispatch, partly
-        os.umask(UMASK_DEFAULT)
-        backup = repository.upgrade(dryrun=False, inplace=inplace)  # note: uses hardlinks internally
-        if inplace:
-            assert backup is None
-            assert first_inode(repository.path) == orig_inode
-        else:
-            assert backup
-            assert first_inode(repository.path) != first_inode(backup)
-            # i have seen cases where the copied tree has world-readable
-            # permissions, which is wrong
-            if 'BORG_TESTS_IGNORE_MODES' not in os.environ:
-                assert stat_segment(backup).st_mode & UMASK_DEFAULT == 0
-
-    assert key_valid(attic_key_file)
-    assert repo_valid(repo_path)
-
-
-@pytest.mark.skipif(not are_hardlinks_supported(), reason='hardlinks not supported')
-def test_hardlink(tmpdir, inplace):
-    """test that we handle hard links properly
-
-    that is, if we are in "inplace" mode, hardlinks should *not*
-    change (ie. we write to the file directly, so we do not rewrite the
-    whole file, and we do not re-create the file).
-
-    if we are *not* in inplace mode, then the inode should change, as
-    we are supposed to leave the original inode alone."""
-    a = str(tmpdir.join('a'))
-    with open(a, 'wb') as tmp:
-        tmp.write(b'aXXX')
-    b = str(tmpdir.join('b'))
-    os.link(a, b)
-    AtticRepositoryUpgrader.header_replace(b, b'a', b'b', inplace=inplace)
-    if not inplace:
-        assert os.stat(a).st_ino != os.stat(b).st_ino
-    else:
-        assert os.stat(a).st_ino == os.stat(b).st_ino
-    with open(b, 'rb') as tmp:
-        assert tmp.read() == b'bXXX'

+ 2 - 272
src/borg/upgrader.py

@@ -1,281 +1,12 @@
-import datetime
 import os
-import shutil
-import time
 
 from .crypto.key import KeyfileKey, KeyfileNotFoundError
-from .constants import REPOSITORY_README
-from .helpers import ProgressIndicatorPercent
-from .helpers import get_base_dir, get_keys_dir, get_cache_dir
-from .locking import Lock
+from .helpers import get_base_dir, get_keys_dir
 from .logger import create_logger
-from .repository import Repository, MAGIC
+from .repository import Repository
 
 logger = create_logger(__name__)
 
-ATTIC_MAGIC = b'ATTICSEG'
-
-
-class AtticRepositoryUpgrader(Repository):
-    def __init__(self, *args, **kw):
-        kw['lock'] = False  # do not create borg lock files (now) in attic repo
-        kw['check_segment_magic'] = False  # skip the Attic check when upgrading
-        super().__init__(*args, **kw)
-
-    def upgrade(self, dryrun=True, inplace=False, progress=False):
-        """convert an attic repository to a borg repository
-
-        those are the files that need to be upgraded here, from most
-        important to least important: segments, key files, and various
-        caches, the latter being optional, as they will be rebuilt if
-        missing.
-
-        we nevertheless do the order in reverse, as we prefer to do
-        the fast stuff first, to improve interactivity.
-        """
-        with self:
-            backup = None
-            if not inplace:
-                backup = f'{self.path}.before-upgrade-{datetime.datetime.now():%Y-%m-%d-%H:%M:%S}'
-                logger.info('making a hardlink copy in %s', backup)
-                if not dryrun:
-                    shutil.copytree(self.path, backup, copy_function=os.link)
-            logger.info("opening attic repository with borg and converting")
-            # now lock the repo, after we have made the copy
-            self.lock = Lock(os.path.join(self.path, 'lock'), exclusive=True, timeout=1.0).acquire()
-            segments = [filename for i, filename in self.io.segment_iterator()]
-            try:
-                keyfile = self.find_attic_keyfile()
-            except KeyfileNotFoundError:
-                logger.warning("no key file found for repository")
-            else:
-                self.convert_keyfiles(keyfile, dryrun)
-        # partial open: just hold on to the lock
-        self.lock = Lock(os.path.join(self.path, 'lock'), exclusive=True).acquire()
-        try:
-            self.convert_cache(dryrun)
-            self.convert_repo_index(dryrun=dryrun, inplace=inplace)
-            self.convert_segments(segments, dryrun=dryrun, inplace=inplace, progress=progress)
-            self.borg_readme()
-        finally:
-            self.lock.release()
-            self.lock = None
-        return backup
-
-    def borg_readme(self):
-        readme = os.path.join(self.path, 'README')
-        os.remove(readme)
-        with open(readme, 'w') as fd:
-            fd.write(REPOSITORY_README)
-
-    @staticmethod
-    def convert_segments(segments, dryrun=True, inplace=False, progress=False):
-        """convert repository segments from attic to borg
-
-        replacement pattern is `s/ATTICSEG/BORG_SEG/` in files in
-        `$ATTIC_REPO/data/**`.
-
-        luckily the magic string length didn't change so we can just
-        replace the 8 first bytes of all regular files in there."""
-        logger.info("converting %d segments..." % len(segments))
-        segment_count = len(segments)
-        pi = ProgressIndicatorPercent(total=segment_count, msg="Converting segments %3.0f%%", msgid='upgrade.convert_segments')
-        for i, filename in enumerate(segments):
-            if progress:
-                pi.show(i)
-            if dryrun:
-                time.sleep(0.001)
-            else:
-                AtticRepositoryUpgrader.header_replace(filename, ATTIC_MAGIC, MAGIC, inplace=inplace)
-        if progress:
-            pi.finish()
-
-    @staticmethod
-    def header_replace(filename, old_magic, new_magic, inplace=True):
-        with open(filename, 'r+b') as segment:
-            segment.seek(0)
-            # only write if necessary
-            if segment.read(len(old_magic)) == old_magic:
-                if inplace:
-                    segment.seek(0)
-                    segment.write(new_magic)
-                else:
-                    # rename the hardlink and rewrite the file. this works
-                    # because the file is still open. so even though the file
-                    # is renamed, we can still read it until it is closed.
-                    os.rename(filename, filename + '.tmp')
-                    with open(filename, 'wb') as new_segment:
-                        new_segment.write(new_magic)
-                        new_segment.write(segment.read())
-                    # the little dance with the .tmp file is necessary
-                    # because Windows won't allow overwriting an open file.
-                    os.unlink(filename + '.tmp')
-
-    def find_attic_keyfile(self):
-        """find the attic keyfiles
-
-        the keyfiles are loaded by `KeyfileKey.find_key_file()`. that
-        finds the keys with the right identifier for the repo.
-
-        this is expected to look into $HOME/.attic/keys or
-        $ATTIC_KEYS_DIR for key files matching the given Borg
-        repository.
-
-        it is expected to raise an exception (KeyfileNotFoundError) if
-        no key is found. whether that exception is from Borg or Attic
-        is unclear.
-
-        this is split in a separate function in case we want to use
-        the attic code here directly, instead of our local
-        implementation."""
-        return AtticKeyfileKey.find_key_file(self)
-
-    @staticmethod
-    def convert_keyfiles(keyfile, dryrun):
-        """convert key files from attic to borg
-
-        replacement pattern is `s/ATTIC KEY/BORG_KEY/` in
-        `get_keys_dir()`, that is `$ATTIC_KEYS_DIR` or
-        `$HOME/.attic/keys`, and moved to `$BORG_KEYS_DIR` or
-        `$HOME/.config/borg/keys`.
-
-        no need to decrypt to convert. we need to rewrite the whole
-        key file because magic string length changed, but that's not a
-        problem because the keyfiles are small (compared to, say,
-        all the segments)."""
-        logger.info("converting keyfile %s" % keyfile)
-        with open(keyfile) as f:
-            data = f.read()
-        data = data.replace(AtticKeyfileKey.FILE_ID, KeyfileKey.FILE_ID, 1)
-        keyfile = os.path.join(get_keys_dir(), os.path.basename(keyfile))
-        logger.info("writing borg keyfile to %s" % keyfile)
-        if not dryrun:
-            with open(keyfile, 'w') as f:
-                f.write(data)
-
-    def convert_repo_index(self, dryrun, inplace):
-        """convert some repo files
-
-        those are all hash indexes, so we need to
-        `s/ATTICIDX/BORG_IDX/` in a few locations:
-
-        * the repository index (in `$ATTIC_REPO/index.%d`, where `%d`
-          is the `Repository.get_index_transaction_id()`), which we
-          should probably update, with a lock, see
-          `Repository.open()`, which i'm not sure we should use
-          because it may write data on `Repository.close()`...
-        """
-        transaction_id = self.get_index_transaction_id()
-        if transaction_id is None:
-            logger.warning('no index file found for repository %s' % self.path)
-        else:
-            index = os.path.join(self.path, 'index.%d' % transaction_id)
-            logger.info("converting repo index %s" % index)
-            if not dryrun:
-                AtticRepositoryUpgrader.header_replace(index, b'ATTICIDX', b'BORG_IDX', inplace=inplace)
-
-    def convert_cache(self, dryrun):
-        """convert caches from attic to borg
-
-        those are all hash indexes, so we need to
-        `s/ATTICIDX/BORG_IDX/` in a few locations:
-
-        * the `files` and `chunks` cache (in `$ATTIC_CACHE_DIR` or
-          `$HOME/.cache/attic/<repoid>/`), which we could just drop,
-          but if we'd want to convert, we could open it with the
-          `Cache.open()`, edit in place and then `Cache.close()` to
-          make sure we have locking right
-        """
-        # copy of attic's get_cache_dir()
-        attic_cache_dir = os.environ.get('ATTIC_CACHE_DIR',
-                                         os.path.join(get_base_dir(),
-                                                      '.cache', 'attic'))
-        attic_cache_dir = os.path.join(attic_cache_dir, self.id_str)
-        borg_cache_dir = os.path.join(get_cache_dir(), self.id_str)
-
-        def copy_cache_file(path):
-            """copy the given attic cache path into the borg directory
-
-            does nothing if dryrun is True. also expects
-            attic_cache_dir and borg_cache_dir to be set in the parent
-            scope, to the directories path including the repository
-            identifier.
-
-            :params path: the basename of the cache file to copy
-            (example: "files" or "chunks") as a string
-
-            :returns: the borg file that was created or None if no
-            Attic cache file was found.
-
-            """
-            attic_file = os.path.join(attic_cache_dir, path)
-            if os.path.exists(attic_file):
-                borg_file = os.path.join(borg_cache_dir, path)
-                if os.path.exists(borg_file):
-                    logger.warning("borg cache file already exists in %s, not copying from Attic", borg_file)
-                else:
-                    logger.info(f"copying attic cache file from {attic_file} to {borg_file}")
-                    if not dryrun:
-                        shutil.copyfile(attic_file, borg_file)
-                return borg_file
-            else:
-                logger.warning(f"no {path} cache file found in {attic_file}")
-                return None
-
-        # XXX: untested, because generating cache files is a PITA, see
-        # Archiver.do_create() for proof
-        if os.path.exists(attic_cache_dir):
-            if not os.path.exists(borg_cache_dir):
-                os.makedirs(borg_cache_dir)
-
-            # file that we don't have a header to convert, just copy
-            for cache in ['config', 'files']:
-                copy_cache_file(cache)
-
-            # we need to convert the headers of those files, copy first
-            for cache in ['chunks']:
-                cache = copy_cache_file(cache)
-                logger.info("converting cache %s" % cache)
-                if not dryrun:
-                    AtticRepositoryUpgrader.header_replace(cache, b'ATTICIDX', b'BORG_IDX')
-
-
-class AtticKeyfileKey(KeyfileKey):
-    """backwards compatible Attic key file parser"""
-    FILE_ID = 'ATTIC KEY'
-
-    # verbatim copy from attic
-    @staticmethod
-    def get_keys_dir():
-        """Determine where to repository keys and cache"""
-        return os.environ.get('ATTIC_KEYS_DIR',
-                              os.path.join(get_base_dir(), '.attic', 'keys'))
-
-    @classmethod
-    def find_key_file(cls, repository):
-        """copy of attic's `find_key_file`_
-
-        this has two small modifications:
-
-        1. it uses the above `get_keys_dir`_ instead of the global one,
-           assumed to be borg's
-
-        2. it uses `repository.path`_ instead of
-           `repository._location.canonical_path`_ because we can't
-           assume the repository has been opened by the archiver yet
-        """
-        get_keys_dir = cls.get_keys_dir
-        keys_dir = get_keys_dir()
-        if not os.path.exists(keys_dir):
-            raise KeyfileNotFoundError(repository.path, keys_dir)
-        for name in os.listdir(keys_dir):
-            filename = os.path.join(keys_dir, name)
-            with open(filename) as fd:
-                line = fd.readline().strip()
-                if line and line.startswith(cls.FILE_ID) and line[10:] == repository.id_str:
-                    return filename
-        raise KeyfileNotFoundError(repository.path, keys_dir)
-
 
 class BorgRepositoryUpgrader(Repository):
     def upgrade(self, dryrun=True, inplace=False, progress=False):
@@ -299,7 +30,6 @@ class BorgRepositoryUpgrader(Repository):
         try:
             os.rename(keyfile, new_keyfile)
         except FileExistsError:
-            # likely the attic -> borg upgrader already put it in the final location
             pass