
Merge pull request #927 from ThomasWaldmann/move-constants

move some constants to new constants module
TW, 9 years ago
commit 5b6377e0c2

borg/archive.py (+11 -30)

@@ -17,10 +17,11 @@ import time
 from io import BytesIO
 from . import xattr
 from .compress import Compressor, COMPR_BUFFER
+from .constants import *  # NOQA
 from .helpers import Error, uid2user, user2uid, gid2group, group2gid, \
     parse_timestamp, to_localtime, format_time, format_timedelta, \
     Manifest, Statistics, decode_dict, make_path_safe, StableDict, int_to_bigint, bigint_to_int, \
-    ProgressIndicatorPercent, ChunkIteratorFileWrapper, remove_surrogates, log_multi, DASHES, \
+    ProgressIndicatorPercent, ChunkIteratorFileWrapper, remove_surrogates, log_multi, \
     PathPrefixPattern, FnmatchPattern, open_item, file_status, format_file_size, consume
 from .repository import Repository
 from .platform import acl_get, acl_set
@@ -29,19 +30,6 @@ from .hashindex import ChunkIndex, ChunkIndexEntry
 from .cache import ChunkListEntry
 import msgpack
 
-ITEMS_BUFFER = 1024 * 1024
-
-CHUNK_MIN_EXP = 19  # 2**19 == 512kiB
-CHUNK_MAX_EXP = 23  # 2**23 == 8MiB
-HASH_WINDOW_SIZE = 0xfff  # 4095B
-HASH_MASK_BITS = 21  # results in ~2MiB chunks statistically
-
-# defaults, use --chunker-params to override
-CHUNKER_PARAMS = (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE)
-
-# chunker params for the items metadata stream, finer granularity
-ITEMS_CHUNKER_PARAMS = (12, 16, 14, HASH_WINDOW_SIZE)
-
 has_lchmod = hasattr(os, 'lchmod')
 has_lchflags = hasattr(os, 'lchflags')
 
@@ -59,7 +47,7 @@ class DownloadPipeline:
         unpacker = msgpack.Unpacker(use_list=False)
         for data in self.fetch_many(ids):
             unpacker.feed(data)
-            items = [decode_dict(item, (b'path', b'source', b'user', b'group')) for item in unpacker]
+            items = [decode_dict(item, ITEM_TEXT_KEYS) for item in unpacker]
             if filter:
                 items = [item for item in items if filter(item)]
             for item in items:
@@ -187,7 +175,7 @@ class Archive:
     def load(self, id):
         self.id = id
         self.metadata = self._load_meta(self.id)
-        decode_dict(self.metadata, (b'name', b'comment', b'hostname', b'username', b'time', b'time_end'))
+        decode_dict(self.metadata, ARCHIVE_TEXT_KEYS)
         self.metadata[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in self.metadata[b'cmdline']]
         self.name = self.metadata[b'name']
 
@@ -233,7 +221,7 @@ Number of files: {0.stats.nfiles}'''.format(
 
     def add_item(self, item):
         unknown_keys = set(item) - ITEM_KEYS
-        assert not unknown_keys, ('unknown item metadata keys detected, please update ITEM_KEYS: %s',
+        assert not unknown_keys, ('unknown item metadata keys detected, please update constants.ITEM_KEYS: %s',
                                   ','.join(k.decode('ascii') for k in unknown_keys))
         if self.show_progress:
             self.stats.show_progress(item=item, dt=0.2)
@@ -631,12 +619,6 @@ Number of files: {0.stats.nfiles}'''.format(
             return os.open(path, flags_normal)
 
 
-# this set must be kept complete, otherwise the RobustUnpacker might malfunction:
-ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks', b'hardlink_master',
-                 b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
-                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])
-
-
 class RobustUnpacker:
     """A restartable/robust version of the streaming msgpack unpacker
     """
@@ -894,7 +876,7 @@ class ArchiveChecker:
                 archive = StableDict(msgpack.unpackb(data))
                 if archive[b'version'] != 1:
                     raise Exception('Unknown archive metadata version')
-                decode_dict(archive, (b'name', b'comment', b'hostname', b'username', b'time', b'time_end'))
+                decode_dict(archive, ARCHIVE_TEXT_KEYS)
                 archive[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in archive[b'cmdline']]
                 items_buffer = ChunkBuffer(self.key)
                 items_buffer.write_chunk = add_callback
@@ -1154,24 +1136,23 @@ class ArchiveRecreater:
         matcher = self.matcher
         tag_files = []
         tagged_dirs = []
-        # build hardlink masters, but only for paths ending in CACHEDIR.TAG, so we can read hard-linked CACHEDIR.TAGs
+        # build hardlink masters, but only for paths ending in CACHE_TAG_NAME, so we can read hard-linked TAGs
         cachedir_masters = {}
 
         for item in archive.iter_items(
-                filter=lambda item: item[b'path'].endswith('CACHEDIR.TAG') or matcher.match(item[b'path'])):
-            if item[b'path'].endswith('CACHEDIR.TAG'):
+                filter=lambda item: item[b'path'].endswith(CACHE_TAG_NAME) or matcher.match(item[b'path'])):
+            if item[b'path'].endswith(CACHE_TAG_NAME):
                 cachedir_masters[item[b'path']] = item
             if stat.S_ISREG(item[b'mode']):
                 dir, tag_file = os.path.split(item[b'path'])
                 if tag_file in self.exclude_if_present:
                     exclude(dir, item)
-                if self.exclude_caches and tag_file == 'CACHEDIR.TAG':
-                    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
+                if self.exclude_caches and tag_file == CACHE_TAG_NAME:
                     if b'chunks' in item:
                         file = open_item(archive, item)
                     else:
                         file = open_item(archive, cachedir_masters[item[b'source']])
-                    if file.read(len(tag_contents)).startswith(tag_contents):
+                    if file.read(len(CACHE_TAG_CONTENTS)).startswith(CACHE_TAG_CONTENTS):
                         exclude(dir, item)
         matcher.add(tag_files, True)
         matcher.add(tagged_dirs, False)
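
The decode_dict calls above now pull their key tuples from the new constants module. A minimal sketch of the pattern, with a simplified decode_dict modeled on how the diff uses it (the real helper in borg/helpers.py may differ):

ITEM_TEXT_KEYS = (b'path', b'source', b'user', b'group')  # as added in borg/constants.py

def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
    # decode the values of the given byte keys in place; leave other values alone
    for key in keys:
        if isinstance(d.get(key), bytes):
            d[key] = d[key].decode(encoding, errors)
    return d

item = {b'path': b'/data/file', b'user': b'alice', b'mode': 0o100644}
decode_dict(item, ITEM_TEXT_KEYS)
assert item[b'path'] == '/data/file'  # text value decoded to str
assert item[b'mode'] == 0o100644      # non-text value untouched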

borg/archiver.py (+3 -6)

@@ -22,23 +22,21 @@ from .helpers import Error, location_validator, archivename_validator, format_ti
     get_cache_dir, prune_within, prune_split, \
     Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
     dir_is_tagged, ChunkerParams, CompressionSpec, is_slow_msgpack, yes, sysinfo, \
-    EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, log_multi, PatternMatcher, ItemFormatter, DASHES
+    log_multi, PatternMatcher, ItemFormatter
 from .logger import create_logger, setup_logging
 logger = create_logger()
 from .compress import Compressor, COMPR_BUFFER
 from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
 from .repository import Repository
 from .cache import Cache
+from .constants import *  # NOQA
 from .key import key_creator, RepoKey, PassphraseKey
-from .archive import Archive, ArchiveChecker, ArchiveRecreater, CHUNKER_PARAMS
+from .archive import Archive, ArchiveChecker, ArchiveRecreater
 from .remote import RepositoryServer, RemoteRepository, cache_if_remote
 from .hashindex import ChunkIndexEntry
 
 has_lchflags = hasattr(os, 'lchflags')
 
-# default umask, overriden by --umask, defaults to read/write only for owner
-UMASK_DEFAULT = 0o077
-
 
 def argument(args, str_or_bool):
     """If bool is passed, return it. If str is passed, retrieve named attribute from args."""
@@ -1960,7 +1958,6 @@ def main():  # pragma: no cover
         elif exit_code == EXIT_ERROR:
             logger.error(exit_msg % ('error', exit_code))
         else:
-            # if you see 666 in output, it usually means exit_code was None
             logger.error(exit_msg % ('abnormal', exit_code or 666))
     sys.exit(exit_code)
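
For reference, the exit codes that moved out of helpers.py keep their documented meaning. A hypothetical caller-side sketch (the borg invocation is illustrative), using the values and the signal convention noted in borg/constants.py:

import subprocess

EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR = 0, 1, 2  # values from borg/constants.py

rc = subprocess.call(['borg', 'check', '/path/to/repo'])  # hypothetical command line
if rc == EXIT_SUCCESS:
    print('everything done, no problems')
elif rc == EXIT_WARNING:
    print('reached normal end of operation, but there were issues')
elif rc >= 128:
    print('borg was killed by signal', rc - 128)  # rc = 128 + N per constants.py
else:
    print('terminated abruptly, did not reach end of operation')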

borg/constants.py (+37 -0)

@@ -0,0 +1,37 @@
+# this set must be kept complete, otherwise the RobustUnpacker might malfunction:
+ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks', b'hardlink_master',
+                 b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
+                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])
+
+ARCHIVE_TEXT_KEYS = (b'name', b'comment', b'hostname', b'username', b'time', b'time_end')
+ITEM_TEXT_KEYS = (b'path', b'source', b'user', b'group')
+
+# default umask, overriden by --umask, defaults to read/write only for owner
+UMASK_DEFAULT = 0o077
+
+CACHE_TAG_NAME = 'CACHEDIR.TAG'
+CACHE_TAG_CONTENTS = b'Signature: 8a477f597d28d172789f06886806bc55'
+
+DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024
+DEFAULT_SEGMENTS_PER_DIR = 10000
+
+CHUNK_MIN_EXP = 19  # 2**19 == 512kiB
+CHUNK_MAX_EXP = 23  # 2**23 == 8MiB
+HASH_WINDOW_SIZE = 0xfff  # 4095B
+HASH_MASK_BITS = 21  # results in ~2MiB chunks statistically
+
+# defaults, use --chunker-params to override
+CHUNKER_PARAMS = (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE)
+
+# chunker params for the items metadata stream, finer granularity
+ITEMS_CHUNKER_PARAMS = (12, 16, 14, HASH_WINDOW_SIZE)
+
+# return codes returned by borg command
+# when borg is killed by signal N, rc = 128 + N
+EXIT_SUCCESS = 0  # everything done, no problems
+EXIT_WARNING = 1  # reached normal end of operation, but there were issues
+EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
+
+DASHES = '-' * 78
+
+PBKDF2_ITERATIONS = 100000
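
A quick sanity check of what the chunker exponents above mean (values and size comments copied from the new module). Incidentally, the NOQA markers on the from .constants import * lines elsewhere in this commit silence flake8's star-import warnings; since the module holds only uppercase constants, the wildcard import is unlikely to shadow anything:

CHUNK_MIN_EXP = 19
CHUNK_MAX_EXP = 23
HASH_MASK_BITS = 21

print(2 ** CHUNK_MIN_EXP)   # 524288  -> 512 kiB minimum chunk size
print(2 ** CHUNK_MAX_EXP)   # 8388608 -> 8 MiB maximum chunk size
print(2 ** HASH_MASK_BITS)  # 2097152 -> ~2 MiB chunks statistically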

borg/helpers.py (+8 -17)

@@ -28,6 +28,7 @@ from operator import attrgetter
 from . import __version__ as borg_version
 from . import hashindex
 from . import chunker
+from .constants import *  # NOQA
 from . import crypto
 from . import shellpattern
 import msgpack
@@ -35,14 +36,6 @@ import msgpack.fallback
 
 import socket
 
-# return codes returned by borg command
-# when borg is killed by signal N, rc = 128 + N
-EXIT_SUCCESS = 0  # everything done, no problems
-EXIT_WARNING = 1  # reached normal end of operation, but there were issues
-EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
-
-DASHES = '-' * 78
-
 
 class Error(Exception):
     """Error base class"""
@@ -248,13 +241,13 @@ def get_cache_dir():
     if not os.path.exists(cache_dir):
         os.makedirs(cache_dir)
         os.chmod(cache_dir, stat.S_IRWXU)
-        with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
+        with open(os.path.join(cache_dir, CACHE_TAG_NAME), 'wb') as fd:
+            fd.write(CACHE_TAG_CONTENTS)
             fd.write(textwrap.dedent("""
-                Signature: 8a477f597d28d172789f06886806bc55
                 # This file is a cache directory tag created by Borg.
                 # For information about cache directory tags, see:
                 #       http://www.brynosaurus.com/cachedir/
-                """).lstrip())
+                """).encode('ascii'))
     return cache_dir
 
 
@@ -495,7 +488,6 @@ def timestamp(s):
 
 def ChunkerParams(s):
     if s.strip().lower() == "default":
-        from .archive import CHUNKER_PARAMS
         return CHUNKER_PARAMS
     chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
     if int(chunk_max) > 23:
@@ -534,13 +526,12 @@ def dir_is_cachedir(path):
     (http://www.brynosaurus.com/cachedir/spec.html).
     """
 
-    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
-    tag_path = os.path.join(path, 'CACHEDIR.TAG')
+    tag_path = os.path.join(path, CACHE_TAG_NAME)
     try:
         if os.path.exists(tag_path):
             with open(tag_path, 'rb') as tag_file:
-                tag_data = tag_file.read(len(tag_contents))
-                if tag_data == tag_contents:
+                tag_data = tag_file.read(len(CACHE_TAG_CONTENTS))
+                if tag_data == CACHE_TAG_CONTENTS:
                     return True
     except OSError:
         pass
@@ -555,7 +546,7 @@ def dir_is_tagged(path, exclude_caches, exclude_if_present):
     """
     """
     tag_paths = []
     tag_paths = []
     if exclude_caches and dir_is_cachedir(path):
     if exclude_caches and dir_is_cachedir(path):
-        tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
+        tag_paths.append(os.path.join(path, CACHE_TAG_NAME))
     if exclude_if_present is not None:
         for tag in exclude_if_present:
             tag_path = os.path.join(path, tag)
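
With CHUNKER_PARAMS now in the constants module, ChunkerParams('default') no longer needs the local import from archive.py removed above. A simplified sketch of the parser's behavior (the error message here is illustrative, not borg's actual wording):

CHUNKER_PARAMS = (19, 23, 21, 0xfff)  # from borg/constants.py

def ChunkerParams(s):
    if s.strip().lower() == "default":
        return CHUNKER_PARAMS  # previously imported from .archive at this point
    chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
    if int(chunk_max) > 23:
        raise ValueError('max chunk size exponent must not exceed 23 (2**23 = 8 MiB)')
    return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)

assert ChunkerParams('default') == (19, 23, 21, 4095)
assert ChunkerParams('10,23,16,4095') == (10, 23, 16, 4095)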

borg/key.py (+2 -1)

@@ -11,6 +11,7 @@ from .helpers import IntegrityError, get_keys_dir, Error, yes
 from .logger import create_logger
 logger = create_logger()
 
+from .constants import *  # NOQA
 from .crypto import AES, bytes_to_long, long_to_bytes, bytes_to_int, num_aes_blocks, hmac_sha256
 from .compress import Compressor, COMPR_BUFFER
 import msgpack
@@ -338,7 +339,7 @@ class KeyfileKeyBase(AESKeyBase):
 
     def encrypt_key_file(self, data, passphrase):
         salt = os.urandom(32)
-        iterations = 100000
+        iterations = PBKDF2_ITERATIONS
         key = passphrase.kdf(salt, iterations, 32)
         hash = hmac_sha256(key, data)
         cdata = AES(is_encrypt=True, key=key).encrypt(data)
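
The iteration count is now a named constant. Assuming passphrase.kdf wraps PBKDF2-HMAC-SHA256 (the standard-library equivalent is shown here; borg's actual kdf may differ), the derivation looks roughly like:

import hashlib
import os

PBKDF2_ITERATIONS = 100000  # from borg/constants.py

salt = os.urandom(32)
# stand-in for passphrase.kdf(salt, iterations, 32): derive a 32-byte key
key = hashlib.pbkdf2_hmac('sha256', b'my passphrase', salt, PBKDF2_ITERATIONS, dklen=32)
assert len(key) == 32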

borg/repository.py (+3 -5)

@@ -12,6 +12,7 @@ import struct
 from zlib import crc32
 
 import msgpack
+from .constants import *  # NOQA
 from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent
 from .hashindex import NSIndex
 from .locking import UpgradableLock, LockError, LockErrorT
@@ -35,9 +36,6 @@ class Repository:
     dir/index.X
     dir/hints.X
     """
-    DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024
-    DEFAULT_SEGMENTS_PER_DIR = 10000
-
     class DoesNotExist(Error):
         """Repository {} does not exist."""
 
@@ -98,8 +96,8 @@ class Repository:
         config = ConfigParser(interpolation=None)
         config.add_section('repository')
         config.set('repository', 'version', '1')
-        config.set('repository', 'segments_per_dir', str(self.DEFAULT_SEGMENTS_PER_DIR))
-        config.set('repository', 'max_segment_size', str(self.DEFAULT_MAX_SEGMENT_SIZE))
+        config.set('repository', 'segments_per_dir', str(DEFAULT_SEGMENTS_PER_DIR))
+        config.set('repository', 'max_segment_size', str(DEFAULT_MAX_SEGMENT_SIZE))
         config.set('repository', 'append_only', '0')
         config.set('repository', 'id', hexlify(os.urandom(32)).decode('ascii'))
         self.save_config(path, config)
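
The two defaults are now module-level constants instead of Repository class attributes. A stdlib-only sketch of the config this code produces:

import io
from configparser import ConfigParser

DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024  # from borg/constants.py
DEFAULT_SEGMENTS_PER_DIR = 10000

config = ConfigParser(interpolation=None)
config.add_section('repository')
config.set('repository', 'version', '1')
config.set('repository', 'segments_per_dir', str(DEFAULT_SEGMENTS_PER_DIR))
config.set('repository', 'max_segment_size', str(DEFAULT_MAX_SEGMENT_SIZE))

buf = io.StringIO()
config.write(buf)
print(buf.getvalue())
# [repository]
# version = 1
# segments_per_dir = 10000
# max_segment_size = 5242880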

borg/testsuite/archiver.py (+15 -10)

@@ -18,11 +18,12 @@ from hashlib import sha256
 import pytest
 
 from .. import xattr
-from ..archive import Archive, ChunkBuffer, ArchiveRecreater, CHUNK_MAX_EXP
+from ..archive import Archive, ChunkBuffer, ArchiveRecreater
 from ..archiver import Archiver
 from ..cache import Cache
+from ..constants import *  # NOQA
 from ..crypto import bytes_to_long, num_aes_blocks
-from ..helpers import Manifest, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
+from ..helpers import Manifest
 from ..key import KeyfileKeyBase
 from ..remote import RemoteRepository, PathNotAllowed
 from ..repository import Repository
@@ -641,16 +642,18 @@ class ArchiverTestCase(ArchiverTestCaseBase):
     def _create_test_caches(self):
         self.cmd('init', self.repository_location)
         self.create_regular_file('file1', size=1024 * 80)
-        self.create_regular_file('cache1/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
-        self.create_regular_file('cache2/CACHEDIR.TAG', contents=b'invalid signature')
+        self.create_regular_file('cache1/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
+        self.create_regular_file('cache2/%s' % CACHE_TAG_NAME,
+                                 contents=b'invalid signature')
         os.mkdir('input/cache3')
-        os.link('input/cache1/CACHEDIR.TAG', 'input/cache3/CACHEDIR.TAG')
+        os.link('input/cache1/%s' % CACHE_TAG_NAME, 'input/cache3/%s' % CACHE_TAG_NAME)
 
     def _assert_test_caches(self):
         with changedir('output'):
             self.cmd('extract', self.repository_location + '::test')
         self.assert_equal(sorted(os.listdir('output/input')), ['cache2', 'file1'])
-        self.assert_equal(sorted(os.listdir('output/input/cache2')), ['CACHEDIR.TAG'])
+        self.assert_equal(sorted(os.listdir('output/input/cache2')), [CACHE_TAG_NAME])
 
     def test_exclude_caches(self):
         self._create_test_caches()
@@ -694,11 +697,13 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.create_regular_file('tagged1/file1', size=1024)
         self.create_regular_file('tagged2/.NOBACKUP2')
         self.create_regular_file('tagged2/file2', size=1024)
-        self.create_regular_file('tagged3/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('tagged3/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
         self.create_regular_file('tagged3/file3', size=1024)
         self.create_regular_file('taggedall/.NOBACKUP1')
         self.create_regular_file('taggedall/.NOBACKUP2')
-        self.create_regular_file('taggedall/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('taggedall/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
         self.create_regular_file('taggedall/file4', size=1024)
 
     def _assert_test_keep_tagged(self):
@@ -707,9 +712,9 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.assert_equal(sorted(os.listdir('output/input')), ['file0', 'tagged1', 'tagged2', 'tagged3', 'taggedall'])
         self.assert_equal(os.listdir('output/input/tagged1'), ['.NOBACKUP1'])
         self.assert_equal(os.listdir('output/input/tagged2'), ['.NOBACKUP2'])
-        self.assert_equal(os.listdir('output/input/tagged3'), ['CACHEDIR.TAG'])
+        self.assert_equal(os.listdir('output/input/tagged3'), [CACHE_TAG_NAME])
         self.assert_equal(sorted(os.listdir('output/input/taggedall')),
-                          ['.NOBACKUP1', '.NOBACKUP2', 'CACHEDIR.TAG', ])
+                          ['.NOBACKUP1', '.NOBACKUP2', CACHE_TAG_NAME, ]
 
     def test_exclude_keep_tagged(self):
         self._create_test_keep_tagged()

borg/testsuite/chunker.py (+1 -1)

@@ -1,7 +1,7 @@
 from io import BytesIO
 
 from ..chunker import Chunker, buzhash, buzhash_update
-from ..archive import CHUNK_MAX_EXP, CHUNKER_PARAMS
+from ..constants import *  # NOQA
 from . import BaseTestCase
 
 

borg/testsuite/upgrader.py (+1 -1)

@@ -9,10 +9,10 @@ try:
 except ImportError:
     attic = None
 
+from ..constants import *  # NOQA
 from ..upgrader import AtticRepositoryUpgrader, AtticKeyfileKey
 from ..helpers import get_keys_dir
 from ..key import KeyfileKey
-from ..archiver import UMASK_DEFAULT
 from ..repository import Repository