
backport bin_to_hex and use it

simplifies code and also eases porting and merging between 1.0 and 1.1/master.
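
The commit replaces the repeated hexlify(...).decode('ascii') idiom with a single bin_to_hex() helper. A minimal sketch of the before/after pattern, using a made-up example value:

    from binascii import hexlify

    def bin_to_hex(binary):
        # the helper added to borg/helpers.py: bytes -> ASCII hex string
        return hexlify(binary).decode('ascii')

    repo_id = bytes([0x67, 0xaa, 0xfe, 0xc1])  # made-up example id
    assert bin_to_hex(repo_id) == hexlify(repo_id).decode('ascii') == '67aafec1'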
Thomas Waldmann 8 years ago
commit 67aafec195
9 changed files with 45 additions and 42 deletions
  1. borg/archive.py (+4 / -5)
  2. borg/archiver.py (+5 / -5)
  3. borg/cache.py (+8 / -8)
  4. borg/helpers.py (+5 / -0)
  5. borg/key.py (+3 / -3)
  6. borg/keymanager.py (+7 / -7)
  7. borg/repository.py (+3 / -3)
  8. borg/testsuite/archiver.py (+5 / -5)
  9. borg/upgrader.py (+5 / -6)

+ 4 - 5
borg/archive.py

@@ -1,4 +1,3 @@
-from binascii import hexlify
 from contextlib import contextmanager
 from datetime import datetime, timezone
 from getpass import getuser
@@ -17,7 +16,7 @@ import sys
 import time
 from io import BytesIO
 from . import xattr
-from .helpers import Error, uid2user, user2uid, gid2group, group2gid, \
+from .helpers import Error, uid2user, user2uid, gid2group, group2gid, bin_to_hex, \
     parse_timestamp, to_localtime, format_time, format_timedelta, remove_surrogates, \
     Manifest, Statistics, decode_dict, make_path_safe, StableDict, int_to_bigint, bigint_to_int, \
     ProgressIndicatorPercent
@@ -254,7 +253,7 @@ class Archive:
 
     @property
     def fpr(self):
-        return hexlify(self.id).decode('ascii')
+        return bin_to_hex(self.id)
 
     @property
     def duration(self):
@@ -522,7 +521,7 @@ Number of files: {0.stats.nfiles}'''.format(
             try:
                 self.cache.chunk_decref(id, stats)
             except KeyError:
-                cid = hexlify(id).decode('ascii')
+                cid = bin_to_hex(id)
                 raise ChunksIndexError(cid)
             except Repository.ObjectNotFound as e:
                 # object not in repo - strange, but we wanted to delete it anyway.
@@ -1010,7 +1009,7 @@ class ArchiveChecker:
                 return _state
 
             def report(msg, chunk_id, chunk_no):
-                cid = hexlify(chunk_id).decode('ascii')
+                cid = bin_to_hex(chunk_id)
                 msg += ' [chunk: %06d_%s]' % (chunk_no, cid)  # see debug-dump-archive-items
                 self.error_found = True
                 logger.error(msg)

+ 5 - 5
borg/archiver.py

@@ -1,4 +1,4 @@
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 from datetime import datetime
 from hashlib import sha256
 from operator import attrgetter
@@ -18,7 +18,7 @@ import collections
 
 from . import __version__
 from .helpers import Error, location_validator, archivename_validator, format_line, format_time, format_file_size, \
-    parse_pattern, PathPrefixPattern, to_localtime, timestamp, safe_timestamp, \
+    parse_pattern, PathPrefixPattern, to_localtime, timestamp, safe_timestamp, bin_to_hex, \
     get_cache_dir, prune_within, prune_split, \
     Manifest, NoManifestError, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
     dir_is_tagged, bigint_to_int, ChunkerParams, CompressionSpec, PrefixSpec, is_slow_msgpack, yes, sysinfo, \
@@ -631,7 +631,7 @@ class Archiver:
         """Show archive details such as disk space used"""
         """Show archive details such as disk space used"""
         stats = archive.calc_stats(cache)
         stats = archive.calc_stats(cache)
         print('Name:', archive.name)
         print('Name:', archive.name)
-        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
+        print('Fingerprint: %s' % bin_to_hex(archive.id))
         print('Hostname:', archive.metadata[b'hostname'])
         print('Username:', archive.metadata[b'username'])
         print('Time (start): %s' % format_time(to_localtime(archive.ts)))
@@ -727,7 +727,7 @@ class Archiver:
         archive = Archive(repository, key, manifest, args.location.archive)
         for i, item_id in enumerate(archive.metadata[b'items']):
             data = key.decrypt(item_id, repository.get(item_id))
-            filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
+            filename = '%06d_%s.items' % (i, bin_to_hex(item_id))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
                 fd.write(data)
@@ -748,7 +748,7 @@ class Archiver:
                 cdata = repository.get(id)
                 give_id = id if id != Manifest.MANIFEST_ID else None
                 data = key.decrypt(give_id, cdata)
-                filename = '%06d_%s.obj' % (i, hexlify(id).decode('ascii'))
+                filename = '%06d_%s.obj' % (i, bin_to_hex(id))
                 print('Dumping', filename)
                 with open(filename, 'wb') as fd:
                     fd.write(data)

+ 8 - 8
borg/cache.py

@@ -3,14 +3,14 @@ from .remote import cache_if_remote
 from collections import namedtuple
 import os
 import stat
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 import shutil
 
 from .key import PlaintextKey
 from .logger import create_logger
 logger = create_logger()
 from .helpers import Error, get_cache_dir, decode_dict, int_to_bigint, \
-    bigint_to_int, format_file_size, yes
+    bigint_to_int, format_file_size, yes, bin_to_hex
 from .locking import Lock
 from .hashindex import ChunkIndex
 
@@ -34,13 +34,13 @@ class Cache:
 
     @staticmethod
     def break_lock(repository, path=None):
-        path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         Lock(os.path.join(path, 'lock'), exclusive=True).break_lock()
 
     @staticmethod
     def destroy(repository, path=None):
         """destroy the cache for ``repository`` or at ``path``"""
-        path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         config = os.path.join(path, 'config')
         if os.path.exists(config):
             os.remove(config)  # kill config first
@@ -55,7 +55,7 @@ class Cache:
         self.repository = repository
         self.key = key
         self.manifest = manifest
-        self.path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        self.path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         self.do_files = do_files
         # Warn user before sending data to a never seen before unencrypted repository
         if not os.path.exists(self.path):
@@ -122,7 +122,7 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         config = configparser.ConfigParser(interpolation=None)
         config.add_section('cache')
         config.set('cache', 'version', '1')
-        config.set('cache', 'repository', hexlify(self.repository.id).decode('ascii'))
+        config.set('cache', 'repository', bin_to_hex(self.repository.id))
         config.set('cache', 'manifest', '')
         with open(os.path.join(self.path, 'config'), 'w') as fd:
             config.write(fd)
@@ -208,7 +208,7 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
                     if age == 0 and bigint_to_int(item[3]) < self._newest_mtime or \
                        age > 0 and age < ttl:
                         msgpack.pack((path_hash, item), fd)
-        self.config.set('cache', 'manifest', hexlify(self.manifest.id).decode('ascii'))
+        self.config.set('cache', 'manifest', bin_to_hex(self.manifest.id))
         self.config.set('cache', 'timestamp', self.manifest.timestamp)
         self.config.set('cache', 'key_type', str(self.key.TYPE))
         self.config.set('cache', 'previous_location', self.repository._location.canonical_path())
@@ -251,7 +251,7 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         archive_path = os.path.join(self.path, 'chunks.archive.d')
 
         def mkpath(id, suffix=''):
-            id_hex = hexlify(id).decode('ascii')
+            id_hex = bin_to_hex(id)
             path = os.path.join(archive_path, id_hex + suffix)
             return path.encode('utf-8')
 

+ 5 - 0
borg/helpers.py

@@ -1,4 +1,5 @@
 import argparse
+from binascii import hexlify
 from collections import namedtuple
 import contextlib
 from functools import wraps
@@ -759,6 +760,10 @@ def safe_encode(s, coding='utf-8', errors='surrogateescape'):
     return s.encode(coding, errors)
 
 
+def bin_to_hex(binary):
+    return hexlify(binary).decode('ascii')
+
+
 class Location:
     """Object representing a repository / archive location
     """

+ 3 - 3
borg/key.py

@@ -7,7 +7,7 @@ import textwrap
 from hmac import HMAC, compare_digest
 from hashlib import sha256, pbkdf2_hmac
 
-from .helpers import IntegrityError, get_keys_dir, Error, yes
+from .helpers import IntegrityError, get_keys_dir, Error, yes, bin_to_hex
 from .logger import create_logger
 logger = create_logger()
 
@@ -201,7 +201,7 @@ class Passphrase(str):
                 passphrase.encode('ascii')
             except UnicodeEncodeError:
                 print('Your passphrase (UTF-8 encoding in hex): %s' %
-                      hexlify(passphrase.encode('utf-8')).decode('ascii'),
+                      bin_to_hex(passphrase.encode('utf-8')),
                       file=sys.stderr)
                 print('As you have a non-ASCII passphrase, it is recommended to keep the UTF-8 encoding in hex together with the passphrase at a safe place.',
                       file=sys.stderr)
@@ -427,7 +427,7 @@ class KeyfileKey(KeyfileKeyBase):
     def save(self, target, passphrase):
         key_data = self._save(passphrase)
         with open(target, 'w') as fd:
-            fd.write('%s %s\n' % (self.FILE_ID, hexlify(self.repository_id).decode('ascii')))
+            fd.write('%s %s\n' % (self.FILE_ID, bin_to_hex(self.repository_id)))
             fd.write(key_data)
             fd.write('\n')
         self.target = target

+ 7 - 7
borg/keymanager.py

@@ -1,10 +1,10 @@
-from binascii import hexlify, unhexlify, a2b_base64, b2a_base64
+from binascii import unhexlify, a2b_base64, b2a_base64
 import binascii
 import textwrap
 from hashlib import sha256
 
 from .key import KeyfileKey, RepoKey, PassphraseKey, KeyfileNotFoundError, PlaintextKey
-from .helpers import Manifest, NoManifestError, Error, yes
+from .helpers import Manifest, NoManifestError, Error, yes, bin_to_hex
 from .repository import Repository
 
 
@@ -79,7 +79,7 @@ class KeyManager:
 
     def store_keyfile(self, target):
         with open(target, 'w') as fd:
-            fd.write('%s %s\n' % (KeyfileKey.FILE_ID, hexlify(self.repository.id).decode('ascii')))
+            fd.write('%s %s\n' % (KeyfileKey.FILE_ID, bin_to_hex(self.repository.id)))
             fd.write(self.keyblob)
             if not self.keyblob.endswith('\n'):
                 fd.write('\n')
@@ -103,7 +103,7 @@ class KeyManager:
         binary = a2b_base64(self.keyblob)
         export += 'BORG PAPER KEY v1\n'
         lines = (len(binary) + 17) // 18
-        repoid = hexlify(self.repository.id).decode('ascii')[:18]
+        repoid = bin_to_hex(self.repository.id)[:18]
         complete_checksum = sha256_truncated(binary, 12)
         export += 'id: {0:d} / {1} / {2} - {3}\n'.format(lines,
                                        grouped(repoid),
@@ -114,7 +114,7 @@ class KeyManager:
             idx += 1
             binline = binary[:18]
             checksum = sha256_truncated(idx.to_bytes(2, byteorder='big') + binline, 2)
-            export += '{0:2d}: {1} - {2}\n'.format(idx, grouped(hexlify(binline).decode('ascii')), checksum)
+            export += '{0:2d}: {1} - {2}\n'.format(idx, grouped(bin_to_hex(binline)), checksum)
             binary = binary[18:]
 
         if path:
@@ -125,7 +125,7 @@ class KeyManager:
 
     def import_keyfile(self, args):
         file_id = KeyfileKey.FILE_ID
-        first_line = file_id + ' ' + hexlify(self.repository.id).decode('ascii') + '\n'
+        first_line = file_id + ' ' + bin_to_hex(self.repository.id) + '\n'
         with open(args.path, 'r') as fd:
             file_first_line = fd.read(len(first_line))
             if file_first_line != first_line:
@@ -141,7 +141,7 @@ class KeyManager:
         # imported here because it has global side effects
         import readline
 
-        repoid = hexlify(self.repository.id).decode('ascii')[:18]
+        repoid = bin_to_hex(self.repository.id)[:18]
         try:
             while True:  # used for repeating on overall checksum mismatch
                 # id line input

+ 3 - 3
borg/repository.py

@@ -1,5 +1,5 @@
 from configparser import ConfigParser
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 from datetime import datetime
 from itertools import islice
 import errno
@@ -12,7 +12,7 @@ import struct
 from zlib import crc32
 
 import msgpack
-from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent
+from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent, bin_to_hex
 from .hashindex import NSIndex
 from .locking import Lock, LockError, LockErrorT
 from .lrucache import LRUCache
@@ -109,7 +109,7 @@ class Repository:
         config.set('repository', 'segments_per_dir', str(self.DEFAULT_SEGMENTS_PER_DIR))
         config.set('repository', 'max_segment_size', str(self.DEFAULT_MAX_SEGMENT_SIZE))
         config.set('repository', 'append_only', str(int(self.append_only)))
-        config.set('repository', 'id', hexlify(os.urandom(32)).decode('ascii'))
+        config.set('repository', 'id', bin_to_hex(os.urandom(32)))
         self.save_config(path, config)
 
     def save_config(self, path, config):

+ 5 - 5
borg/testsuite/archiver.py

@@ -1,4 +1,4 @@
-from binascii import hexlify, unhexlify, b2a_base64
+from binascii import unhexlify, b2a_base64
 from configparser import ConfigParser
 import errno
 import os
@@ -21,7 +21,7 @@ from ..archive import Archive, ChunkBuffer, CHUNK_MAX_EXP, flags_noatime, flags_
 from ..archiver import Archiver
 from ..cache import Cache
 from ..crypto import bytes_to_long, num_aes_blocks
-from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
+from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, bin_to_hex
 from ..key import RepoKey, KeyfileKey, Passphrase
 from ..keymanager import RepoIdMismatch, NotABorgKeyFile
 from ..remote import RemoteRepository, PathNotAllowed
@@ -409,7 +409,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
     def _set_repository_id(self, path, id):
         config = ConfigParser(interpolation=None)
         config.read(os.path.join(path, 'config'))
-        config.set('repository', 'id', hexlify(id).decode('ascii'))
+        config.set('repository', 'id', bin_to_hex(id))
         with open(os.path.join(path, 'config'), 'w') as fd:
             config.write(fd)
         with Repository(self.repository_path) as repository:
@@ -1205,7 +1205,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         with open(export_file, 'r') as fd:
             export_contents = fd.read()
 
-        assert export_contents.startswith('BORG_KEY ' + hexlify(repo_id).decode() + '\n')
+        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')
 
         key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0]
 
@@ -1232,7 +1232,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         with open(export_file, 'r') as fd:
             export_contents = fd.read()
 
-        assert export_contents.startswith('BORG_KEY ' + hexlify(repo_id).decode() + '\n')
+        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')
 
         with Repository(self.repository_path) as repository:
             repo_key = RepoKey(repository)

+ 5 - 6
borg/upgrader.py

@@ -1,4 +1,3 @@
-from binascii import hexlify
 import datetime
 import logging
 logger = logging.getLogger(__name__)
@@ -6,7 +5,7 @@ import os
 import shutil
 import time
 
-from .helpers import get_keys_dir, get_cache_dir, ProgressIndicatorPercent
+from .helpers import get_keys_dir, get_cache_dir, ProgressIndicatorPercent, bin_to_hex
 from .locking import Lock
 from .repository import Repository, MAGIC
 from .key import KeyfileKey, KeyfileNotFoundError
@@ -188,8 +187,8 @@ class AtticRepositoryUpgrader(Repository):
         attic_cache_dir = os.environ.get('ATTIC_CACHE_DIR',
                                          os.path.join(os.path.expanduser('~'),
                                                       '.cache', 'attic'))
-        attic_cache_dir = os.path.join(attic_cache_dir, hexlify(self.id).decode('ascii'))
-        borg_cache_dir = os.path.join(get_cache_dir(), hexlify(self.id).decode('ascii'))
+        attic_cache_dir = os.path.join(attic_cache_dir, bin_to_hex(self.id))
+        borg_cache_dir = os.path.join(get_cache_dir(), bin_to_hex(self.id))
 
         def copy_cache_file(path):
             """copy the given attic cache path into the borg directory
@@ -263,7 +262,7 @@ class AtticKeyfileKey(KeyfileKey):
            assume the repository has been opened by the archiver yet
         """
         get_keys_dir = cls.get_keys_dir
-        id = hexlify(repository.id).decode('ascii')
+        id = bin_to_hex(repository.id)
         keys_dir = get_keys_dir()
         if not os.path.exists(keys_dir):
             raise KeyfileNotFoundError(repository.path, keys_dir)
@@ -313,7 +312,7 @@ class Borg0xxKeyfileKey(KeyfileKey):
     @classmethod
     def find_key_file(cls, repository):
         get_keys_dir = cls.get_keys_dir
-        id = hexlify(repository.id).decode('ascii')
+        id = bin_to_hex(repository.id)
         keys_dir = get_keys_dir()
         if not os.path.exists(keys_dir):
             raise KeyfileNotFoundError(repository.path, keys_dir)