
Rename store -> repository

Jonas Borgström, 12 years ago
commit c4ec578090
9 files changed, 178 insertions(+), 682 deletions(-)
  1. darc/archive.py   (+12 -12)
  2. darc/archiver.py  (+52 -52)
  3. darc/cache.py     (+10 -10)
  4. darc/helpers.py   (+8 -8)
  5. darc/key.py       (+34 -34)
  6. darc/remote.py    (+13 -13)
  7. darc/store.py     (+0 -503)
  8. darc/test.py      (+41 -41)
  9. docs/index.rst    (+8 -9)

darc/archive.py (+12 -12)

@@ -81,11 +81,11 @@ class Archive(object):
     class AlreadyExists(Exception):
         pass
 
-    def __init__(self, store, key, manifest, name, cache=None, create=False,
+    def __init__(self, repository, key, manifest, name, cache=None, create=False,
                  checkpoint_interval=300, numeric_owner=False):
         self.cwd = os.getcwd()
         self.key = key
-        self.store = store
+        self.repository = repository
         self.cache = cache
         self.manifest = manifest
         self.items = BytesIO()
@@ -113,7 +113,7 @@ class Archive(object):
 
     def load(self, id):
         self.id = id
-        data = self.key.decrypt(self.id, self.store.get(self.id))
+        data = self.key.decrypt(self.id, self.repository.get(self.id))
         self.metadata = msgpack.unpackb(data)
         if self.metadata[b'version'] != 1:
             raise Exception('Unknown archive metadata version')
@@ -139,7 +139,7 @@ class Archive(object):
             i += n
             if not items:
                 break
-            for id, chunk in [(id, chunk) for id, chunk in zip_longest(items, self.store.get_many(items))]:
+            for id, chunk in [(id, chunk) for id, chunk in zip_longest(items, self.repository.get_many(items))]:
                 unpacker.feed(self.key.decrypt(id, chunk))
                 iter = ItemIter(unpacker, filter)
                 for item in iter:
@@ -195,7 +195,7 @@ class Archive(object):
         self.cache.add_chunk(self.id, data, self.stats)
         self.manifest.archives[name] = {'id': self.id, 'time': metadata['time']}
         self.manifest.write()
-        self.store.commit()
+        self.repository.commit()
         self.cache.commit()
 
     def calc_stats(self, cache):
@@ -209,7 +209,7 @@ class Archive(object):
         cache.begin_txn()
         stats = Statistics()
         add(self.id)
-        for id, chunk in zip_longest(self.metadata[b'items'], self.store.get_many(self.metadata[b'items'])):
+        for id, chunk in zip_longest(self.metadata[b'items'], self.repository.get_many(self.metadata[b'items'])):
             add(id)
             unpacker.feed(self.key.decrypt(id, chunk))
             for item in unpacker:
@@ -253,7 +253,7 @@ class Archive(object):
             else:
                 with open(path, 'wb') as fd:
                     ids = [id for id, size, csize in item[b'chunks']]
-                    for id, chunk in zip_longest(ids, self.store.get_many(ids, peek)):
+                    for id, chunk in zip_longest(ids, self.repository.get_many(ids, peek)):
                         data = self.key.decrypt(id, chunk)
                         fd.write(data)
                     self.restore_attrs(path, item, fd=fd.fileno())
@@ -319,7 +319,7 @@ class Archive(object):
             start(item)
             ids = [id for id, size, csize in item[b'chunks']]
             try:
-                for id, chunk in zip_longest(ids, self.store.get_many(ids, peek)):
+                for id, chunk in zip_longest(ids, self.repository.get_many(ids, peek)):
                     self.key.decrypt(id, chunk)
             except Exception:
                 result(item, False)
@@ -329,7 +329,7 @@ class Archive(object):
     def delete(self, cache):
         unpacker = msgpack.Unpacker(use_list=False)
         for id in self.metadata[b'items']:
-            unpacker.feed(self.key.decrypt(id, self.store.get(id)))
+            unpacker.feed(self.key.decrypt(id, self.repository.get(id)))
             for item in unpacker:
                 try:
                     for chunk_id, size, csize in item[b'chunks']:
@@ -340,7 +340,7 @@ class Archive(object):
         self.cache.chunk_decref(self.id)
         del self.manifest.archives[self.name]
         self.manifest.write()
-        self.store.commit()
+        self.repository.commit()
         cache.commit()
 
     def stat_attrs(self, st, path):
@@ -412,6 +412,6 @@ class Archive(object):
         self.add_item(item)
 
     @staticmethod
-    def list_archives(store, key, manifest, cache=None):
+    def list_archives(repository, key, manifest, cache=None):
         for name, info in manifest.archives.items():
-            yield Archive(store, key, manifest, name, cache=cache)
+            yield Archive(repository, key, manifest, name, cache=cache)
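
The hunks above are a mechanical rename; the read path itself is unchanged. As a hypothetical, self-contained sketch of what Archive.load now does (the repository and key objects are assumed to behave like the ones in this diff):

    import msgpack

    def load_archive_metadata(repository, key, archive_id):
        # Fetch the encrypted metadata blob by id and decrypt it, exactly
        # as Archive.load does above; only the attribute name changed.
        data = key.decrypt(archive_id, repository.get(archive_id))
        metadata = msgpack.unpackb(data)
        if metadata[b'version'] != 1:
            raise Exception('Unknown archive metadata version')
        return metadata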

darc/archiver.py (+52 -52)

@@ -7,13 +7,13 @@ import stat
 import sys
 
 from .archive import Archive
-from .store import Store
+from .repository import Repository
 from .cache import Cache
 from .key import key_creator
 from .helpers import location_validator, format_time, \
     format_file_mode, IncludePattern, ExcludePattern, exclude_path, adjust_patterns, to_localtime, \
     get_cache_dir, format_timedelta, prune_split, Manifest, Location, remove_surrogates
-from .remote import StoreServer, RemoteStore
+from .remote import RepositoryServer, RemoteRepository
 
 
 class Archiver(object):
@@ -21,13 +21,13 @@ class Archiver(object):
     def __init__(self):
         self.exit_code = 0
 
-    def open_store(self, location, create=False):
+    def open_repository(self, location, create=False):
         if location.proto == 'ssh':
-            store = RemoteStore(location, create=create)
+            repository = RemoteRepository(location, create=create)
         else:
-            store = Store(location.path, create=create)
-        store._location = location
-        return store
+            repository = Repository(location.path, create=create)
+        repository._location = location
+        return repository
 
     def print_error(self, msg, *args):
         msg = args and msg % args or msg
@@ -43,31 +43,31 @@ class Archiver(object):
                 print(msg, end=' ')
 
     def do_serve(self, args):
-        return StoreServer().serve()
+        return RepositoryServer().serve()
 
     def do_init(self, args):
-        print('Initializing store "%s"' % args.store.orig)
-        store = self.open_store(args.store, create=True)
-        key = key_creator(store, args)
+        print('Initializing repository at "%s"' % args.repository.orig)
+        repository = self.open_repository(args.repository, create=True)
+        key = key_creator(repository, args)
         manifest = Manifest()
-        manifest.store = store
+        manifest.repository = repository
         manifest.key = key
         manifest.write()
-        store.commit()
+        repository.commit()
         return self.exit_code
 
     def do_change_passphrase(self, args):
-        store = self.open_store(Location(args.store))
-        manifest, key = Manifest.load(store)
+        repository = self.open_repository(Location(args.repository))
+        manifest, key = Manifest.load(repository)
         key.change_passphrase()
         return self.exit_code
 
     def do_create(self, args):
         t0 = datetime.now()
-        store = self.open_store(args.archive)
-        manifest, key = Manifest.load(store)
-        cache = Cache(store, key, manifest)
-        archive = Archive(store, key, manifest, args.archive.archive, cache=cache,
+        repository = self.open_repository(args.archive)
+        manifest, key = Manifest.load(repository)
+        cache = Cache(repository, key, manifest)
+        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                           create=True, checkpoint_interval=args.checkpoint_interval,
                           numeric_owner=args.numeric_owner)
         # Add darc cache dir to inode_skip list
@@ -77,7 +77,7 @@ class Archiver(object):
             skip_inodes.add((st.st_ino, st.st_dev))
         except IOError:
             pass
-        # Add local store dir to inode_skip list
+        # Add local repository dir to inode_skip list
         if not args.archive.host:
             try:
                 st = os.stat(args.archive.path)
@@ -150,9 +150,9 @@ class Archiver(object):
             self.print_error('Unknown file type: %s', path)
 
     def do_extract(self, args):
-        store = self.open_store(args.archive)
-        manifest, key = Manifest.load(store)
-        archive = Archive(store, key, manifest, args.archive.archive,
+        repository = self.open_repository(args.archive)
+        manifest, key = Manifest.load(repository)
+        archive = Archive(repository, key, manifest, args.archive.archive,
                           numeric_owner=args.numeric_owner)
         dirs = []
         for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], args.patterns)):
@@ -173,19 +173,19 @@ class Archiver(object):
         return self.exit_code
 
     def do_delete(self, args):
-        store = self.open_store(args.archive)
-        manifest, key = Manifest.load(store)
-        cache = Cache(store, key, manifest)
-        archive = Archive(store, key, manifest, args.archive.archive, cache=cache)
+        repository = self.open_repository(args.archive)
+        manifest, key = Manifest.load(repository)
+        cache = Cache(repository, key, manifest)
+        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
         archive.delete(cache)
         return self.exit_code
 
     def do_list(self, args):
-        store = self.open_store(args.src)
-        manifest, key = Manifest.load(store)
+        repository = self.open_repository(args.src)
+        manifest, key = Manifest.load(repository)
         if args.src.archive:
             tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
-            archive = Archive(store, key, manifest, args.src.archive)
+            archive = Archive(repository, key, manifest, args.src.archive)
             for item, _ in archive.iter_items():
                 type = tmap.get(item[b'mode'] // 4096, '?')
                 mode = format_file_mode(item[b'mode'])
@@ -208,14 +208,14 @@ class Archiver(object):
                                                   item[b'group'] or item[b'gid'], size, mtime,
                                                   remove_surrogates(item[b'path']), extra))
         else:
-            for archive in sorted(Archive.list_archives(store, key, manifest), key=attrgetter('ts')):
+            for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
                 print('%-20s %s' % (archive.metadata[b'name'], to_localtime(archive.ts).strftime('%c')))
         return self.exit_code
 
     def do_verify(self, args):
-        store = self.open_store(args.archive)
-        manifest, key = Manifest.load(store)
-        archive = Archive(store, key, manifest, args.archive.archive)
+        repository = self.open_repository(args.archive)
+        manifest, key = Manifest.load(repository)
+        archive = Archive(repository, key, manifest, args.archive.archive)
 
         def start_cb(item):
             self.print_verbose('%s ...', remove_surrogates(item[b'path']), newline=False)
@@ -232,10 +232,10 @@ class Archiver(object):
         return self.exit_code
 
     def do_info(self, args):
-        store = self.open_store(args.archive)
-        manifest, key = Manifest.load(store)
-        cache = Cache(store, key, manifest)
-        archive = Archive(store, key, manifest, args.archive.archive, cache=cache)
+        repository = self.open_repository(args.archive)
+        manifest, key = Manifest.load(repository)
+        cache = Cache(repository, key, manifest)
+        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
         stats = archive.calc_stats(cache)
         print('Name:', archive.name)
         print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
@@ -247,10 +247,10 @@ class Archiver(object):
         return self.exit_code
 
     def do_prune(self, args):
-        store = self.open_store(args.store)
-        manifest, key = Manifest.load(store)
-        cache = Cache(store, key, manifest)
-        archives = list(sorted(Archive.list_archives(store, key, manifest, cache),
+        repository = self.open_repository(args.repository)
+        manifest, key = Manifest.load(repository)
+        cache = Cache(repository, key, manifest)
+        archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                                key=attrgetter('ts'), reverse=True))
         if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0:
             self.print_error('At least one of the "hourly", "daily", "weekly", "monthly" or "yearly" '
@@ -299,9 +299,9 @@ class Archiver(object):
 
         subparser = subparsers.add_parser('init', parents=[common_parser])
         subparser.set_defaults(func=self.do_init)
-        subparser.add_argument('store',
+        subparser.add_argument('repository',
                                type=location_validator(archive=False),
-                               help='Store to create')
+                               help='Repository to create')
         subparser.add_argument('--key-file', dest='keyfile',
                                action='store_true', default=False,
                                help='Encrypt data using key file')
@@ -311,7 +311,7 @@ class Archiver(object):
 
         subparser = subparsers.add_parser('change-passphrase', parents=[common_parser])
         subparser.set_defaults(func=self.do_change_passphrase)
-        subparser.add_argument('store', type=location_validator(archive=False))
+        subparser.add_argument('repository', type=location_validator(archive=False))
 
         subparser = subparsers.add_parser('create', parents=[common_parser])
         subparser.set_defaults(func=self.do_create)
@@ -365,7 +365,7 @@ class Archiver(object):
         subparser = subparsers.add_parser('list', parents=[common_parser])
         subparser.set_defaults(func=self.do_list)
         subparser.add_argument('src', metavar='SRC', type=location_validator(),
-                               help='Store/Archive to list contents of')
+                               help='Repository/Archive to list contents of')
 
         subparser = subparsers.add_parser('verify', parents=[common_parser])
         subparser.set_defaults(func=self.do_verify)
@@ -399,9 +399,9 @@ class Archiver(object):
                                help='Number of yearly archives to keep')
         subparser.add_argument('-p', '--prefix', dest='prefix', type=str,
                                help='Only consider archive names starting with this prefix')
-        subparser.add_argument('store', metavar='STORE',
+        subparser.add_argument('repository', metavar='REPOSITORY',
                                type=location_validator(archive=False),
-                               help='Store to prune')
+                               help='Repository to prune')
 
         args = parser.parse_args(args)
         if getattr(args, 'patterns', None):
@@ -414,11 +414,11 @@ def main():
     archiver = Archiver()
     try:
         exit_code = archiver.run()
-    except Store.DoesNotExist:
-        archiver.print_error('Error: Store not found')
+    except Repository.DoesNotExist:
+        archiver.print_error('Error: Repository not found')
         exit_code = 1
-    except Store.AlreadyExists:
-        archiver.print_error('Error: Store already exists')
+    except Repository.AlreadyExists:
+        archiver.print_error('Error: Repository already exists')
         exit_code = 1
     except Archive.AlreadyExists as e:
         archiver.print_error('Error: Archive "%s" already exists', e)
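
Every do_* command above now opens the backend the same way. A hypothetical condensation of the pattern repeated in do_create, do_delete and do_info (argument names taken from the hunks):

    # Open the repository named on the command line, load its manifest and
    # key, then build the cache and archive objects on top of it.
    repository = self.open_repository(args.archive)
    manifest, key = Manifest.load(repository)
    cache = Cache(repository, key, manifest)
    archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)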

darc/cache.py (+10 -10)

@@ -14,12 +14,12 @@ class Cache(object):
     """Client Side cache
     """
 
-    def __init__(self, store, key, manifest):
+    def __init__(self, repository, key, manifest):
         self.txn_active = False
-        self.store = store
+        self.repository = repository
         self.key = key
         self.manifest = manifest
-        self.path = os.path.join(get_cache_dir(), hexlify(store.id).decode('ascii'))
+        self.path = os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
         if not os.path.exists(self.path):
             self.create()
         self.open()
@@ -28,7 +28,7 @@ class Cache(object):
             self.commit()
 
     def create(self):
-        """Create a new empty store at `path`
+        """Create a new empty cache at `path`
         """
         os.makedirs(self.path)
         with open(os.path.join(self.path, 'README'), 'w') as fd:
@@ -36,7 +36,7 @@ class Cache(object):
         config = RawConfigParser()
         config.add_section('cache')
         config.set('cache', 'version', '1')
-        config.set('cache', 'store', hexlify(self.store.id).decode('ascii'))
+        config.set('cache', 'repository', hexlify(self.repository.id).decode('ascii'))
         config.set('cache', 'manifest', '')
         with open(os.path.join(self.path, 'config'), 'w') as fd:
             config.write(fd)
@@ -54,7 +54,7 @@ class Cache(object):
         self.config.read(os.path.join(self.path, 'config'))
         if self.config.getint('cache', 'version') != 1:
             raise Exception('%s Does not look like a darc cache')
-        self.id = self.config.get('cache', 'store')
+        self.id = self.config.get('cache', 'repository')
         self.manifest_id = unhexlify(self.config.get('cache', 'manifest').encode('ascii'))  # .encode needed for Python 3.[0-2]
         self.chunks = ChunkIndex(os.path.join(self.path, 'chunks').encode('utf-8'))
         self.files = None
@@ -135,13 +135,13 @@ class Cache(object):
         unpacker = msgpack.Unpacker()
         for name, info in self.manifest.archives.items():
             id = info[b'id']
-            cdata = self.store.get(id)
+            cdata = self.repository.get(id)
             data = self.key.decrypt(id, cdata)
             add(id, len(data), len(cdata))
             archive = msgpack.unpackb(data)
             decode_dict(archive, (b'name', b'hostname', b'username', b'time'))  # fixme: argv
             print('Analyzing archive:', archive[b'name'])
-            for id, chunk in zip_longest(archive[b'items'], self.store.get_many(archive[b'items'])):
+            for id, chunk in zip_longest(archive[b'items'], self.repository.get_many(archive[b'items'])):
                 data = self.key.decrypt(id, chunk)
                 add(id, len(data), len(chunk))
                 unpacker.feed(data)
@@ -160,7 +160,7 @@ class Cache(object):
         size = len(data)
         data = self.key.encrypt(data)
         csize = len(data)
-        self.store.put(id, data, wait=False)
+        self.repository.put(id, data, wait=False)
         self.chunks[id] = (1, size, csize)
         stats.update(size, csize, True)
         return id, size, csize
@@ -182,7 +182,7 @@ class Cache(object):
         count, size, csize = self.chunks[id]
         if count == 1:
             del self.chunks[id]
-            self.store.delete(id, wait=False)
+            self.repository.delete(id, wait=False)
         else:
             self.chunks[id] = (count - 1, size, csize)
 

darc/helpers.py (+8 -8)

@@ -22,12 +22,12 @@ class Manifest(object):
         self.config = {}
 
     @classmethod
-    def load(cls, store):
+    def load(cls, repository):
         from .key import key_factory
         manifest = cls()
-        manifest.store = store
-        cdata = store.get(manifest.MANIFEST_ID)
-        manifest.key = key = key_factory(store, cdata)
+        manifest.repository = repository
+        cdata = repository.get(manifest.MANIFEST_ID)
+        manifest.key = key = key_factory(repository, cdata)
         data = key.decrypt(None, cdata)
         manifest.id = key.id_hash(data)
         m = msgpack.unpackb(data)
@@ -44,7 +44,7 @@ class Manifest(object):
             'config': self.config,
         })
         self.id = self.key.id_hash(data)
-        self.store.put(self.MANIFEST_ID, self.key.encrypt(data))
+        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))
 
 
 def prune_split(archives, pattern, n, skip=[]):
@@ -81,13 +81,13 @@ class Statistics(object):
 
 
 def get_keys_dir():
-    """Determine where to store keys and cache"""
+    """Determine where keys are stored"""
     return os.environ.get('DARC_KEYS_DIR',
                           os.path.join(os.path.expanduser('~'), '.darc', 'keys'))
 
 
 def get_cache_dir():
-    """Determine where to store keys and cache"""
+    """Determine where the cache is stored"""
     return os.environ.get('DARC_CACHE_DIR',
                           os.path.join(os.path.expanduser('~'), '.darc', 'cache'))
 
@@ -267,7 +267,7 @@ def group2gid(group):
 
 
 class Location(object):
-    """Object representing a store / archive location
+    """Object representing a repository / archive location
 
     >>> Location('ssh://user@host:1234/some/path::archive')
     Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive='archive')
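
Manifest.load and Manifest.write bracket every session against a repository. A hypothetical usage example of the renamed API, following the pattern used throughout archiver.py (module paths assumed from this commit's imports):

    from darc.repository import Repository
    from darc.helpers import Manifest

    # Open an existing local repository and list the archive names
    # recorded in its manifest.
    repository = Repository('/data/my-backup.darc')
    manifest, key = Manifest.load(repository)
    print(sorted(manifest.archives))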

darc/key.py (+34 -34)

@@ -24,22 +24,22 @@ class HMAC(hmac.HMAC):
         self.inner.update(msg)
 
 
-def key_creator(store, args):
+def key_creator(repository, args):
     if args.keyfile:
-        return KeyfileKey.create(store, args)
+        return KeyfileKey.create(repository, args)
     elif args.passphrase:
-        return PassphraseKey.create(store, args)
+        return PassphraseKey.create(repository, args)
     else:
-        return PlaintextKey.create(store, args)
+        return PlaintextKey.create(repository, args)
 
 
-def key_factory(store, manifest_data):
+def key_factory(repository, manifest_data):
     if manifest_data[:1] == KEYFILE:
-        return KeyfileKey.detect(store, manifest_data)
+        return KeyfileKey.detect(repository, manifest_data)
     elif manifest_data[:1] == PASSPHRASE:
-        return PassphraseKey.detect(store, manifest_data)
+        return PassphraseKey.detect(repository, manifest_data)
     elif manifest_data[:1] == PLAINTEXT:
-        return PlaintextKey.detect(store, manifest_data)
+        return PlaintextKey.detect(repository, manifest_data)
     else:
         raise Exception('Unknown Key type %d' % ord(manifest_data[0]))
 
@@ -63,12 +63,12 @@ class PlaintextKey(KeyBase):
     chunk_seed = 0
 
     @classmethod
-    def create(cls, store, args):
+    def create(cls, repository, args):
         print('Encryption NOT enabled.\nUse the --key-file or --passphrase options to enable encryption.')
         return cls()
 
     @classmethod
-    def detect(cls, store, manifest_data):
+    def detect(cls, repository, manifest_data):
         return cls()
 
     def id_hash(self, data):
@@ -137,7 +137,7 @@ class PassphraseKey(AESKeyBase):
     iterations = 10000
 
     @classmethod
-    def create(cls, store, args):
+    def create(cls, repository, args):
         key = cls()
         passphrase = os.environ.get('DARC_PASSPHRASE')
         if passphrase is not None:
@@ -152,20 +152,20 @@ class PassphraseKey(AESKeyBase):
             passphrase2 = getpass('Enter same passphrase again: ')
             if passphrase != passphrase2:
                 print('Passphrases do not match')
-        key.init(store, passphrase)
+        key.init(repository, passphrase)
         if passphrase:
             print('Remember your passphrase. Your data will be inaccessible without it.')
         return key
 
     @classmethod
-    def detect(cls, store, manifest_data):
-        prompt = 'Enter passphrase for %s: ' % store._location.orig
+    def detect(cls, repository, manifest_data):
+        prompt = 'Enter passphrase for %s: ' % repository._location.orig
         key = cls()
         passphrase = os.environ.get('DARC_PASSPHRASE')
         if passphrase is None:
             passphrase = getpass(prompt)
         while True:
-            key.init(store, passphrase)
+            key.init(repository, passphrase)
             try:
                 key.decrypt(None, manifest_data)
                 key.init_ciphers(PREFIX + long_to_bytes(key.extract_iv(manifest_data) + 1000))
@@ -173,8 +173,8 @@ class PassphraseKey(AESKeyBase):
             except IntegrityError:
                 passphrase = getpass(prompt)
 
-    def init(self, store, passphrase):
-        self.init_from_random_data(pbkdf2_sha256(passphrase.encode('utf-8'), store.id, self.iterations, 100))
+    def init(self, repository, passphrase):
+        self.init_from_random_data(pbkdf2_sha256(passphrase.encode('utf-8'), repository.id, self.iterations, 100))
         self.init_ciphers()
 
 
@@ -185,9 +185,9 @@ class KeyfileKey(AESKeyBase):
     IV = PREFIX + long_to_bytes(1)
 
     @classmethod
-    def detect(cls, store, manifest_data):
+    def detect(cls, repository, manifest_data):
         key = cls()
-        path = cls.find_key_file(store)
+        path = cls.find_key_file(repository)
         prompt = 'Enter passphrase for key file %s: ' % path
         passphrase = os.environ.get('DARC_PASSPHRASE', '')
         while not key.load(path, passphrase):
@@ -196,8 +196,8 @@ class KeyfileKey(AESKeyBase):
         return key
 
     @classmethod
-    def find_key_file(cls, store):
-        id = hexlify(store.id).decode('ascii')
+    def find_key_file(cls, repository):
+        id = hexlify(repository.id).decode('ascii')
         keys_dir = get_keys_dir()
         for name in os.listdir(keys_dir):
             filename = os.path.join(keys_dir, name)
@@ -205,7 +205,7 @@ class KeyfileKey(AESKeyBase):
                 line = fd.readline().strip()
                 if line and line.startswith(cls.FILE_ID) and line[9:] == id:
                     return filename
-        raise Exception('Key file for store with ID %s not found' % id)
+        raise Exception('Key file for repository with ID %s not found' % id)
 
     def load(self, filename, passphrase):
         with open(filename, 'r') as fd:
@@ -215,7 +215,7 @@ class KeyfileKey(AESKeyBase):
             key = msgpack.unpackb(data)
             if key[b'version'] != 1:
                 raise IntegrityError('Invalid key file header')
-            self.store_id = key[b'store_id']
+            self.repository_id = key[b'repository_id']
             self.enc_key = key[b'enc_key']
             self.enc_hmac_key = key[b'enc_hmac_key']
             self.id_key = key[b'id_key']
@@ -253,7 +253,7 @@ class KeyfileKey(AESKeyBase):
     def save(self, path, passphrase):
         key = {
             'version': 1,
-            'store_id': self.store_id,
+            'repository_id': self.repository_id,
             'enc_key': self.enc_key,
             'enc_hmac_key': self.enc_hmac_key,
             'id_key': self.id_key,
@@ -261,7 +261,7 @@ class KeyfileKey(AESKeyBase):
         }
         data = self.encrypt_key_file(msgpack.packb(key), passphrase)
         with open(path, 'w') as fd:
-            fd.write('%s %s\n' % (self.FILE_ID, hexlify(self.store_id).decode('ascii')))
+            fd.write('%s %s\n' % (self.FILE_ID, hexlify(self.repository_id).decode('ascii')))
             fd.write(b2a_base64(data).decode('ascii'))
         self.path = path
 
@@ -276,8 +276,8 @@ class KeyfileKey(AESKeyBase):
         print('Key file "%s" updated' % self.path)
 
     @classmethod
-    def create(cls, store, args):
-        filename = args.store.to_key_filename()
+    def create(cls, repository, args):
+        filename = args.repository.to_key_filename()
         path = filename
         i = 1
         while os.path.exists(path):
@@ -294,7 +294,7 @@ class KeyfileKey(AESKeyBase):
             if passphrase != passphrase2:
                 print('Passphrases do not match')
         key = cls()
-        key.store_id = store.id
+        key.repository_id = repository.id
         key.init_from_random_data(get_random_bytes(100))
         key.init_ciphers()
         key.save(path, passphrase)
@@ -312,7 +312,7 @@ class KeyTestCase(unittest.TestCase):
     def tearDown(self):
         shutil.rmtree(self.tmppath)
 
-    class MockStore(object):
+    class MockRepository(object):
         class _Location(object):
             orig = '/some/place'
 
@@ -334,13 +334,13 @@ class KeyTestCase(unittest.TestCase):
 
     def test_keyfile(self):
         class MockArgs(object):
-            store = Location(tempfile.mkstemp()[1])
+            repository = Location(tempfile.mkstemp()[1])
         os.environ['DARC_PASSPHRASE'] = 'test'
-        key = KeyfileKey.create(self.MockStore(), MockArgs())
+        key = KeyfileKey.create(self.MockRepository(), MockArgs())
         self.assertEqual(bytes_to_long(key.enc_cipher.iv, 8), 0)
         manifest = key.encrypt(b'')
         iv = key.extract_iv(manifest)
-        key2 = KeyfileKey.detect(self.MockStore(), manifest)
+        key2 = KeyfileKey.detect(self.MockRepository(), manifest)
         self.assertEqual(bytes_to_long(key2.enc_cipher.iv, 8), iv + 1000)
         # Key data sanity check
         self.assertEqual(len(set([key2.id_key, key2.enc_key, key2.enc_hmac_key])), 3)
@@ -350,7 +350,7 @@ class KeyTestCase(unittest.TestCase):
 
     def test_passphrase(self):
         os.environ['DARC_PASSPHRASE'] = 'test'
-        key = PassphraseKey.create(self.MockStore(), None)
+        key = PassphraseKey.create(self.MockRepository(), None)
         self.assertEqual(bytes_to_long(key.enc_cipher.iv, 8), 0)
         self.assertEqual(hexlify(key.id_key), b'f28e915da78a972786da47fee6c4bd2960a421b9bdbdb35a7942eb82552e9a72')
         self.assertEqual(hexlify(key.enc_hmac_key), b'169c6082f209e524ea97e2c75318936f6e93c101b9345942a95491e9ae1738ca')
@@ -358,7 +358,7 @@ class KeyTestCase(unittest.TestCase):
         self.assertEqual(key.chunk_seed, -324662077)
         manifest = key.encrypt(b'')
         iv = key.extract_iv(manifest)
-        key2 = PassphraseKey.detect(self.MockStore(), manifest)
+        key2 = PassphraseKey.detect(self.MockRepository(), manifest)
         self.assertEqual(bytes_to_long(key2.enc_cipher.iv, 8), iv + 1000)
         self.assertEqual(key.id_key, key2.id_key)
         self.assertEqual(key.enc_hmac_key, key2.enc_hmac_key)
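
PassphraseKey is touched by the rename in substance, not just in spelling: the repository id is the PBKDF2 salt, so the derived key material is bound to one specific repository. A hypothetical sketch using the stdlib in place of darc's pbkdf2_sha256 helper:

    import hashlib

    def derive_key_material(passphrase, repository_id, iterations=10000):
        # Same derivation as PassphraseKey.init above: 100 bytes of key
        # material from the passphrase, salted with the repository id.
        return hashlib.pbkdf2_hmac('sha256', passphrase.encode('utf-8'),
                                   repository_id, iterations, dklen=100)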

darc/remote.py (+13 -13)

@@ -7,16 +7,16 @@ import sys
 import getpass
 import unittest
 
-from .store import Store, StoreTestCase
+from .repository import Repository, RepositoryTestCase
 from .lrucache import LRUCache
 
 BUFSIZE = 10 * 1024 * 1024
 
 
-class StoreServer(object):
+class RepositoryServer(object):
 
     def __init__(self):
-        self.store = None
+        self.repository = None
 
     def serve(self):
         # Make stdin non-blocking
@@ -39,7 +39,7 @@ class StoreServer(object):
                         try:
                             f = getattr(self, method)
                         except AttributeError:
-                            f = getattr(self.store, method)
+                            f = getattr(self.repository, method)
                         res = f(*args)
                     except Exception as e:
                         sys.stdout.buffer.write(msgpack.packb((1, msgid, e.__class__.__name__, None)))
@@ -56,11 +56,11 @@ class StoreServer(object):
         path = os.fsdecode(path)
         if path.startswith('/~'):
             path = path[1:]
-        self.store = Store(os.path.expanduser(path), create)
-        return self.store.id
+        self.repository = Repository(os.path.expanduser(path), create)
+        return self.repository.id
 
 
-class RemoteStore(object):
+class RemoteRepository(object):
 
     class RPCError(Exception):
 
@@ -92,9 +92,9 @@ class RemoteStore(object):
             self.id = self.call('open', (location.path, create))
         except self.RPCError as e:
             if e.name == b'DoesNotExist':
-                raise Store.DoesNotExist
+                raise Repository.DoesNotExist
             elif e.name == b'AlreadyExists':
-                raise Store.AlreadyExists
+                raise Repository.AlreadyExists
 
     def __del__(self):
         self.close()
@@ -235,7 +235,7 @@ class RemoteStore(object):
                 return res
         except self.RPCError as e:
             if e.name == b'DoesNotExist':
-                raise Store.DoesNotExist
+                raise Repository.DoesNotExist
             raise
 
     def get_many(self, ids, peek=None):
@@ -264,15 +264,15 @@ class RemoteStore(object):
             self.p = None
 
 
-class RemoteStoreTestCase(StoreTestCase):
+class RemoteRepositoryTestCase(RepositoryTestCase):
 
     def open(self, create=False):
         from .helpers import Location
-        return RemoteStore(Location('localhost:' + os.path.join(self.tmppath, 'store')), create=create)
+        return RemoteRepository(Location('localhost:' + os.path.join(self.tmppath, 'repository')), create=create)
 
 
 def suite():
-    return unittest.TestLoader().loadTestsFromTestCase(RemoteStoreTestCase)
+    return unittest.TestLoader().loadTestsFromTestCase(RemoteRepositoryTestCase)
 
 if __name__ == '__main__':
     unittest.main()
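
The RPC layer is a thin msgpack framing over ssh's stdin/stdout, so only the class names and the served object change here. A hypothetical sketch of how the server side answers a single request, based on the serve loop above:

    import msgpack

    def handle_request(repository, msgid, method, args):
        # Replies are (1, msgid, error_name, result) tuples, mirroring the
        # error path visible in RepositoryServer.serve.
        try:
            result = getattr(repository, method)(*args)
        except Exception as e:
            return msgpack.packb((1, msgid, e.__class__.__name__, None))
        return msgpack.packb((1, msgid, None, result))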

darc/store.py (+0 -503)

@@ -1,503 +0,0 @@
-from configparser import RawConfigParser
-from binascii import hexlify, unhexlify
-import fcntl
-import os
-import re
-import shutil
-import struct
-import tempfile
-import unittest
-from zlib import crc32
-
-from .hashindex import NSIndex
-from .helpers import IntegrityError, read_msgpack, write_msgpack
-from .lrucache import LRUCache
-
-MAX_OBJECT_SIZE = 20 * 1024 * 1024
-
-TAG_PUT = 0
-TAG_DELETE = 1
-TAG_COMMIT = 2
-
-
-class Store(object):
-    """Filesystem based transactional key value store
-
-    On disk layout:
-    dir/README
-    dir/config
-    dir/data/<X / SEGMENTS_PER_DIR>/<X>
-    dir/index.X
-    dir/hints.X
-    """
-    DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024
-    DEFAULT_SEGMENTS_PER_DIR = 10000
-
-    class DoesNotExist(KeyError):
-        """Requested key does not exist"""
-
-    class AlreadyExists(KeyError):
-        """Requested key does not exist"""
-
-    def __init__(self, path, create=False):
-        self.io = None
-        if create:
-            self.create(path)
-        self.open(path)
-
-    def create(self, path):
-        """Create a new empty store at `path`
-        """
-        if os.path.exists(path) and (not os.path.isdir(path) or os.listdir(path)):
-            raise self.AlreadyExists(path)
-        if not os.path.exists(path):
-            os.mkdir(path)
-        with open(os.path.join(path, 'README'), 'w') as fd:
-            fd.write('This is a DARC store')
-        os.mkdir(os.path.join(path, 'data'))
-        config = RawConfigParser()
-        config.add_section('store')
-        config.set('store', 'version', '1')
-        config.set('store', 'segments_per_dir', self.DEFAULT_SEGMENTS_PER_DIR)
-        config.set('store', 'max_segment_size', self.DEFAULT_MAX_SEGMENT_SIZE)
-        config.set('store', 'id', hexlify(os.urandom(32)).decode('ascii'))
-        with open(os.path.join(path, 'config'), 'w') as fd:
-            config.write(fd)
-
-    def open(self, path):
-        self.head = None
-        self.path = path
-        if not os.path.isdir(path):
-            raise self.DoesNotExist(path)
-        self.lock_fd = open(os.path.join(path, 'README'), 'r+')
-        fcntl.flock(self.lock_fd, fcntl.LOCK_EX)
-        self.config = RawConfigParser()
-        self.config.read(os.path.join(self.path, 'config'))
-        if self.config.getint('store', 'version') != 1:
-            raise Exception('%s Does not look like a darc store')
-        self.max_segment_size = self.config.getint('store', 'max_segment_size')
-        self.segments_per_dir = self.config.getint('store', 'segments_per_dir')
-        self.id = unhexlify(self.config.get('store', 'id').strip().encode('ascii'))  # .encode needed for Python 3.[0-2]
-        self.rollback()
-
-    def close(self):
-        self.rollback()
-        self.lock_fd.close()
-
-    def commit(self, rollback=True):
-        """Commit transaction
-        """
-        self.io.write_commit()
-        self.compact_segments()
-        self.write_index()
-        self.rollback()
-
-    def _available_indices(self, reverse=False):
-        names = [int(name[6:]) for name in os.listdir(self.path) if re.match('index\.\d+', name)]
-        names.sort(reverse=reverse)
-        return names
-
-    def open_index(self, head, read_only=False):
-        if head is None:
-            self.index = NSIndex.create(os.path.join(self.path, 'index.tmp').encode('utf-8'))
-            self.segments = {}
-            self.compact = set()
-        else:
-            if read_only:
-                self.index = NSIndex((os.path.join(self.path, 'index.%d') % head).encode('utf-8'))
-            else:
-                shutil.copy(os.path.join(self.path, 'index.%d' % head),
-                            os.path.join(self.path, 'index.tmp'))
-                self.index = NSIndex(os.path.join(self.path, 'index.tmp').encode('utf-8'))
-            hints = read_msgpack(os.path.join(self.path, 'hints.%d' % head))
-            if hints[b'version'] != 1:
-                raise ValueError('Unknown hints file version: %d' % hints['version'])
-            self.segments = hints[b'segments']
-            self.compact = set(hints[b'compact'])
-
-    def write_index(self):
-        hints = {b'version': 1,
-                 b'segments': self.segments,
-                 b'compact': list(self.compact)}
-        write_msgpack(os.path.join(self.path, 'hints.%d' % self.io.head), hints)
-        self.index.flush()
-        os.rename(os.path.join(self.path, 'index.tmp'),
-                  os.path.join(self.path, 'index.%d' % self.io.head))
-        # Remove old indices
-        current = '.%d' % self.io.head
-        for name in os.listdir(self.path):
-            if not name.startswith('index.') and not name.startswith('hints.'):
-                continue
-            if name.endswith(current):
-                continue
-            os.unlink(os.path.join(self.path, name))
-
-    def compact_segments(self):
-        """Compact sparse segments by copying data into new segments
-        """
-        if not self.compact:
-            return
-
-        def lookup(tag, key):
-            return tag == TAG_PUT and self.index.get(key, (-1, -1))[0] == segment
-        segments = self.segments
-        for segment in sorted(self.compact):
-            if segments[segment] > 0:
-                for tag, key, data in self.io.iter_objects(segment, lookup, include_data=True):
-                    new_segment, offset = self.io.write_put(key, data)
-                    self.index[key] = new_segment, offset
-                    segments.setdefault(new_segment, 0)
-                    segments[new_segment] += 1
-                    segments[segment] -= 1
-                assert segments[segment] == 0
-        self.io.write_commit()
-        for segment in self.compact:
-            assert self.segments.pop(segment) == 0
-            self.io.delete_segment(segment)
-        self.compact = set()
-
-    def recover(self, path):
-        """Recover missing index by replaying logs"""
-        start = None
-        available = self._available_indices()
-        if available:
-            start = available[-1]
-        self.open_index(start)
-        for segment, filename in self.io._segment_names():
-            if start is not None and segment <= start:
-                continue
-            self.segments[segment] = 0
-            for tag, key, offset in self.io.iter_objects(segment):
-                if tag == TAG_PUT:
-                    try:
-                        s, _ = self.index[key]
-                        self.compact.add(s)
-                        self.segments[s] -= 1
-                    except KeyError:
-                        pass
-                    self.index[key] = segment, offset
-                    self.segments[segment] += 1
-                elif tag == TAG_DELETE:
-                    try:
-                        s, _ = self.index.pop(key)
-                        self.segments[s] -= 1
-                        self.compact.add(s)
-                        self.compact.add(segment)
-                    except KeyError:
-                        pass
-            if self.segments[segment] == 0:
-                self.compact.add(segment)
-        if self.io.head is not None:
-            self.write_index()
-
-    def rollback(self):
-        """
-        """
-        self._active_txn = False
-        if self.io:
-            self.io.close()
-        self.io = LoggedIO(self.path, self.max_segment_size, self.segments_per_dir)
-        if self.io.head is not None and not os.path.exists(os.path.join(self.path, 'index.%d' % self.io.head)):
-            self.recover(self.path)
-        self.open_index(self.io.head, read_only=True)
-
-    def _len(self):
-        return len(self.index)
-
-    def get(self, id):
-        try:
-            segment, offset = self.index[id]
-            return self.io.read(segment, offset, id)
-        except KeyError:
-            raise self.DoesNotExist
-
-    def get_many(self, ids, peek=None):
-        for id in ids:
-            yield self.get(id)
-
-    def put(self, id, data, wait=True):
-        if not self._active_txn:
-            self._active_txn = True
-            self.open_index(self.io.head)
-        try:
-            segment, _ = self.index[id]
-            self.segments[segment] -= 1
-            self.compact.add(segment)
-            segment = self.io.write_delete(id)
-            self.segments.setdefault(segment, 0)
-            self.compact.add(segment)
-        except KeyError:
-            pass
-        segment, offset = self.io.write_put(id, data)
-        self.segments.setdefault(segment, 0)
-        self.segments[segment] += 1
-        self.index[id] = segment, offset
-
-    def delete(self, id, wait=True):
-        if not self._active_txn:
-            self._active_txn = True
-            self.open_index(self.io.head)
-        try:
-            segment, offset = self.index.pop(id)
-            self.segments[segment] -= 1
-            self.compact.add(segment)
-            segment = self.io.write_delete(id)
-            self.compact.add(segment)
-            self.segments.setdefault(segment, 0)
-        except KeyError:
-            raise self.DoesNotExist
-
-    def add_callback(self, cb, data):
-        cb(None, None, data)
-
-
-class LoggedIO(object):
-
-    header_fmt = struct.Struct('<IIB')
-    assert header_fmt.size == 9
-    put_header_fmt = struct.Struct('<IIB32s')
-    assert put_header_fmt.size == 41
-    header_no_crc_fmt = struct.Struct('<IB')
-    assert header_no_crc_fmt.size == 5
-    crc_fmt = struct.Struct('<I')
-    assert crc_fmt.size == 4
-
-    _commit = header_no_crc_fmt.pack(9, TAG_COMMIT)
-    COMMIT = crc_fmt.pack(crc32(_commit)) + _commit
-
-    def __init__(self, path, limit, segments_per_dir, capacity=100):
-        self.path = path
-        self.fds = LRUCache(capacity)
-        self.segment = None
-        self.limit = limit
-        self.segments_per_dir = segments_per_dir
-        self.offset = 0
-        self._write_fd = None
-        self.head = None
-        self.cleanup()
-
-    def close(self):
-        for segment in list(self.fds.keys()):
-            self.fds.pop(segment).close()
-        self.close_segment()
-        self.fds = None  # Just to make sure we're disabled
-
-    def _segment_names(self, reverse=False):
-        for dirpath, dirs, filenames in os.walk(os.path.join(self.path, 'data')):
-            dirs.sort(key=int, reverse=reverse)
-            filenames.sort(key=int, reverse=reverse)
-            for filename in filenames:
-                yield int(filename), os.path.join(dirpath, filename)
-
-    def cleanup(self):
-        """Delete segment files left by aborted transactions
-        """
-        self.head = None
-        self.segment = 0
-        for segment, filename in self._segment_names(reverse=True):
-            if self.is_complete_segment(filename):
-                self.head = segment
-                self.segment = self.head + 1
-                return
-            else:
-                os.unlink(filename)
-
-    def is_complete_segment(self, filename):
-        with open(filename, 'rb') as fd:
-            fd.seek(-self.header_fmt.size, 2)
-            return fd.read(self.header_fmt.size) == self.COMMIT
-
-    def segment_filename(self, segment):
-        return os.path.join(self.path, 'data', str(segment // self.segments_per_dir), str(segment))
-
-    def get_write_fd(self, no_new=False):
-        if not no_new and self.offset and self.offset > self.limit:
-            self.close_segment()
-        if not self._write_fd:
-            if self.segment % self.segments_per_dir == 0:
-                dirname = os.path.join(self.path, 'data', str(self.segment // self.segments_per_dir))
-                if not os.path.exists(dirname):
-                    os.mkdir(dirname)
-            self._write_fd = open(self.segment_filename(self.segment), 'ab')
-            self._write_fd.write(b'DSEGMENT')
-            self.offset = 8
-        return self._write_fd
-
-    def get_fd(self, segment):
-        try:
-            return self.fds[segment]
-        except KeyError:
-            fd = open(self.segment_filename(segment), 'rb')
-            self.fds[segment] = fd
-            return fd
-
-    def delete_segment(self, segment):
-        try:
-            os.unlink(self.segment_filename(segment))
-        except OSError:
-            pass
-
-    def iter_objects(self, segment, lookup=None, include_data=False):
-        fd = self.get_fd(segment)
-        fd.seek(0)
-        if fd.read(8) != b'DSEGMENT':
-            raise IntegrityError('Invalid segment header')
-        offset = 8
-        header = fd.read(self.header_fmt.size)
-        while header:
-            crc, size, tag = self.header_fmt.unpack(header)
-            if size > MAX_OBJECT_SIZE:
-                raise IntegrityError('Invalid segment object size')
-            rest = fd.read(size - self.header_fmt.size)
-            if crc32(rest, crc32(memoryview(header)[4:])) & 0xffffffff != crc:
-                raise IntegrityError('Segment checksum mismatch')
-            if tag not in (TAG_PUT, TAG_DELETE, TAG_COMMIT):
-                raise IntegrityError('Invalid segment entry header')
-            key = None
-            if tag in (TAG_PUT, TAG_DELETE):
-                key = rest[:32]
-            if not lookup or lookup(tag, key):
-                if include_data:
-                    yield tag, key, rest[32:]
-                else:
-                    yield tag, key, offset
-            offset += size
-            header = fd.read(self.header_fmt.size)
-
-    def read(self, segment, offset, id):
-        if segment == self.segment:
-            self._write_fd.flush()
-        fd = self.get_fd(segment)
-        fd.seek(offset)
-        header = fd.read(self.put_header_fmt.size)
-        crc, size, tag, key = self.put_header_fmt.unpack(header)
-        if size > MAX_OBJECT_SIZE:
-            raise IntegrityError('Invalid segment object size')
-        data = fd.read(size - self.put_header_fmt.size)
-        if crc32(data, crc32(memoryview(header)[4:])) & 0xffffffff != crc:
-            raise IntegrityError('Segment checksum mismatch')
-        if tag != TAG_PUT or id != key:
-            raise IntegrityError('Invalid segment entry header')
-        return data
-
-    def write_put(self, id, data):
-        size = len(data) + self.put_header_fmt.size
-        fd = self.get_write_fd()
-        offset = self.offset
-        header = self.header_no_crc_fmt.pack(size, TAG_PUT)
-        crc = self.crc_fmt.pack(crc32(data, crc32(id, crc32(header))) & 0xffffffff)
-        fd.write(b''.join((crc, header, id, data)))
-        self.offset += size
-        return self.segment, offset
-
-    def write_delete(self, id):
-        fd = self.get_write_fd()
-        header = self.header_no_crc_fmt.pack(self.put_header_fmt.size, TAG_DELETE)
-        crc = self.crc_fmt.pack(crc32(id, crc32(header)) & 0xffffffff)
-        fd.write(b''.join((crc, header, id)))
-        self.offset += self.put_header_fmt.size
-        return self.segment
-
-    def write_commit(self):
-        fd = self.get_write_fd(no_new=True)
-        header = self.header_no_crc_fmt.pack(self.header_fmt.size, TAG_COMMIT)
-        crc = self.crc_fmt.pack(crc32(header) & 0xffffffff)
-        fd.write(b''.join((crc, header)))
-        self.head = self.segment
-        self.close_segment()
-
-    def close_segment(self):
-        if self._write_fd:
-            self.segment += 1
-            self.offset = 0
-            os.fsync(self._write_fd)
-            self._write_fd.close()
-            self._write_fd = None
-
-
-class StoreTestCase(unittest.TestCase):
-
-    def open(self, create=False):
-        return Store(os.path.join(self.tmppath, 'store'), create=create)
-
-    def setUp(self):
-        self.tmppath = tempfile.mkdtemp()
-        self.store = self.open(create=True)
-
-    def tearDown(self):
-        self.store.close()
-        shutil.rmtree(self.tmppath)
-
-    def test1(self):
-        for x in range(100):
-            self.store.put(('%-32d' % x).encode('ascii'), b'SOMEDATA')
-        key50 = ('%-32d' % 50).encode('ascii')
-        self.assertEqual(self.store.get(key50), b'SOMEDATA')
-        self.store.delete(key50)
-        self.assertRaises(Store.DoesNotExist, lambda: self.store.get(key50))
-        self.store.commit()
-        self.store.close()
-        store2 = self.open()
-        self.assertRaises(Store.DoesNotExist, lambda: store2.get(key50))
-        for x in range(100):
-            if x == 50:
-                continue
-            self.assertEqual(store2.get(('%-32d' % x).encode('ascii')), b'SOMEDATA')
-        store2.close()
-
-    def test2(self):
-        """Test multiple sequential transactions
-        """
-        self.store.put(b'00000000000000000000000000000000', b'foo')
-        self.store.put(b'00000000000000000000000000000001', b'foo')
-        self.store.commit()
-        self.store.delete(b'00000000000000000000000000000000')
-        self.store.put(b'00000000000000000000000000000001', b'bar')
-        self.store.commit()
-        self.assertEqual(self.store.get(b'00000000000000000000000000000001'), b'bar')
-
-    def test_consistency(self):
-        """Test cache consistency
-        """
-        self.store.put(b'00000000000000000000000000000000', b'foo')
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'foo')
-        self.store.put(b'00000000000000000000000000000000', b'foo2')
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'foo2')
-        self.store.put(b'00000000000000000000000000000000', b'bar')
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'bar')
-        self.store.delete(b'00000000000000000000000000000000')
-        self.assertRaises(Store.DoesNotExist, lambda: self.store.get(b'00000000000000000000000000000000'))
-
-    def test_consistency2(self):
-        """Test cache consistency2
-        """
-        self.store.put(b'00000000000000000000000000000000', b'foo')
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'foo')
-        self.store.commit()
-        self.store.put(b'00000000000000000000000000000000', b'foo2')
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'foo2')
-        self.store.rollback()
-        self.assertEqual(self.store.get(b'00000000000000000000000000000000'), b'foo')
-
-    def test_single_kind_transactions(self):
-        # put
-        self.store.put(b'00000000000000000000000000000000', b'foo')
-        self.store.commit()
-        self.store.close()
-        # replace
-        self.store = self.open()
-        self.store.put(b'00000000000000000000000000000000', b'bar')
-        self.store.commit()
-        self.store.close()
-        # delete
-        self.store = self.open()
-        self.store.delete(b'00000000000000000000000000000000')
-        self.store.commit()
-
-
-
-def suite():
-    return unittest.TestLoader().loadTestsFromTestCase(StoreTestCase)
-
-if __name__ == '__main__':
-    unittest.main()
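
store.py is removed here, and the rest of the commit imports the same API from .repository instead. Its on-disk format is a log of CRC-checked records; as a hypothetical illustration, the 9-byte commit record built by LoggedIO above can be reconstructed with the stdlib alone:

    import struct
    from zlib import crc32

    TAG_COMMIT = 2
    header_no_crc_fmt = struct.Struct('<IB')   # record size, tag
    crc_fmt = struct.Struct('<I')

    # A commit record is: 4-byte CRC32, 4-byte size, 1-byte tag.
    _commit = header_no_crc_fmt.pack(9, TAG_COMMIT)
    COMMIT = crc_fmt.pack(crc32(_commit) & 0xffffffff) + _commit
    assert len(COMMIT) == 9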

darc/test.py (+41 -41)

@@ -13,8 +13,8 @@ from . import helpers, lrucache, crypto
 from .chunker import chunkify, buzhash, buzhash_update
 from .archiver import Archiver
 from .key import suite as KeySuite
-from .store import Store, suite as StoreSuite
-from .remote import Store, suite as RemoteStoreSuite
+from .repository import Repository, suite as RepositorySuite
+from .remote import Repository, suite as RemoteRepositorySuite
 
 has_mtime_ns = sys.version >= '3.3'
 utime_supports_fd = os.utime in getattr(os, 'supports_fd', {})
@@ -27,8 +27,8 @@ class Test(unittest.TestCase):
     def setUp(self):
         self.archiver = Archiver()
         self.tmpdir = tempfile.mkdtemp()
-        self.store_path = os.path.join(self.tmpdir, 'store')
-        self.store_location = self.prefix + self.store_path
+        self.repository_path = os.path.join(self.tmpdir, 'repository')
+        self.repository_location = self.prefix + self.repository_path
         self.input_path = os.path.join(self.tmpdir, 'input')
         self.output_path = os.path.join(self.tmpdir, 'output')
         self.keys_path = os.path.join(self.tmpdir, 'keys')
@@ -62,8 +62,8 @@ class Test(unittest.TestCase):
 
     def create_src_archive(self, name):
         src_dir = os.path.join(os.getcwd(), os.path.dirname(__file__), '..')
-        self.darc('init', self.store_location)
-        self.darc('create', self.store_location + '::' + name, src_dir)
+        self.darc('init', self.repository_location)
+        self.darc('create', self.repository_location + '::' + name, src_dir)
 
     def create_regual_file(self, name, size=0):
         filename = os.path.join(self.input_path, name)
@@ -123,16 +123,16 @@ class Test(unittest.TestCase):
         os.symlink('somewhere', os.path.join(self.input_path, 'link1'))
         # FIFO node
         os.mkfifo(os.path.join(self.input_path, 'fifo1'))
-        self.darc('init', self.store_location)
-        self.darc('create', self.store_location + '::test', 'input')
-        self.darc('create', self.store_location + '::test.2', 'input')
-        self.darc('extract', self.store_location + '::test', 'output')
-        self.assertEqual(len(self.darc('list', self.store_location).splitlines()), 2)
-        self.assertEqual(len(self.darc('list', self.store_location + '::test').splitlines()), 9)
+        self.darc('init', self.repository_location)
+        self.darc('create', self.repository_location + '::test', 'input')
+        self.darc('create', self.repository_location + '::test.2', 'input')
+        self.darc('extract', self.repository_location + '::test', 'output')
+        self.assertEqual(len(self.darc('list', self.repository_location).splitlines()), 2)
+        self.assertEqual(len(self.darc('list', self.repository_location + '::test').splitlines()), 9)
         self.diff_dirs('input', 'output/input')
-        info_output = self.darc('info', self.store_location + '::test')
+        info_output = self.darc('info', self.repository_location + '::test')
         shutil.rmtree(self.cache_path)
-        info_output2 = self.darc('info', self.store_location + '::test')
+        info_output2 = self.darc('info', self.repository_location + '::test')
         # info_output2 starts with some "initializing cache" text but should
         # end the same way as info_output
         assert info_output2.endswith(info_output)
@@ -140,52 +140,52 @@ class Test(unittest.TestCase):
     def test_overwrite(self):
         self.create_regual_file('file1', size=1024 * 80)
         self.create_regual_file('dir2/file2', size=1024 * 80)
-        self.darc('init', self.store_location)
-        self.darc('create', self.store_location + '::test', 'input')
+        self.darc('init', self.repository_location)
+        self.darc('create', self.repository_location + '::test', 'input')
         # Overwriting regular files and directories should be supported
         os.mkdir('output/input')
         os.mkdir('output/input/file1')
         os.mkdir('output/input/dir2')
-        self.darc('extract', self.store_location + '::test', 'output')
+        self.darc('extract', self.repository_location + '::test', 'output')
         self.diff_dirs('input', 'output/input')
         # But non-empty dirs should fail
         os.unlink('output/input/file1')
         os.mkdir('output/input/file1')
         os.mkdir('output/input/file1/dir')
-        self.darc('extract', self.store_location + '::test', 'output', exit_code=1)
+        self.darc('extract', self.repository_location + '::test', 'output', exit_code=1)
 
     def test_delete(self):
         self.create_regual_file('file1', size=1024 * 80)
         self.create_regual_file('dir2/file2', size=1024 * 80)
-        self.darc('init', self.store_location)
-        self.darc('create', self.store_location + '::test', 'input')
-        self.darc('create', self.store_location + '::test.2', 'input')
-        self.darc('verify', self.store_location + '::test')
-        self.darc('verify', self.store_location + '::test.2')
-        self.darc('delete', self.store_location + '::test')
-        self.darc('verify', self.store_location + '::test.2')
-        self.darc('delete', self.store_location + '::test.2')
+        self.darc('init', self.repository_location)
+        self.darc('create', self.repository_location + '::test', 'input')
+        self.darc('create', self.repository_location + '::test.2', 'input')
+        self.darc('verify', self.repository_location + '::test')
+        self.darc('verify', self.repository_location + '::test.2')
+        self.darc('delete', self.repository_location + '::test')
+        self.darc('verify', self.repository_location + '::test.2')
+        self.darc('delete', self.repository_location + '::test.2')
         # Make sure all data except the manifest has been deleted
-        store = Store(self.store_path)
-        self.assertEqual(store._len(), 1)
+        repository = Repository(self.repository_path)
+        self.assertEqual(repository._len(), 1)
 
-    def test_corrupted_store(self):
+    def test_corrupted_repository(self):
         self.create_src_archive('test')
-        self.darc('verify', self.store_location + '::test')
-        name = sorted(os.listdir(os.path.join(self.tmpdir, 'store', 'data', '0')), reverse=True)[0]
-        fd = open(os.path.join(self.tmpdir, 'store', 'data', '0', name), 'r+')
+        self.darc('verify', self.repository_location + '::test')
+        name = sorted(os.listdir(os.path.join(self.tmpdir, 'repository', 'data', '0')), reverse=True)[0]
+        fd = open(os.path.join(self.tmpdir, 'repository', 'data', '0', name), 'r+')
         fd.seek(100)
         fd.write('X')
         fd.close()
-        self.darc('verify', self.store_location + '::test', exit_code=1)
+        self.darc('verify', self.repository_location + '::test', exit_code=1)
 
-    def test_prune_store(self):
+    def test_prune_repository(self):
         src_dir = os.path.join(os.getcwd(), os.path.dirname(__file__))
-        self.darc('init', self.store_location)
-        self.darc('create', self.store_location + '::test1', src_dir)
-        self.darc('create', self.store_location + '::test2', src_dir)
-        self.darc('prune', self.store_location, '--daily=2')
-        output = self.darc('list', self.store_location)
+        self.darc('init', self.repository_location)
+        self.darc('create', self.repository_location + '::test1', src_dir)
+        self.darc('create', self.repository_location + '::test2', src_dir)
+        self.darc('prune', self.repository_location, '--daily=2')
+        output = self.darc('list', self.repository_location)
         assert 'test1' not in output
         assert 'test2' in output
 
@@ -224,8 +224,8 @@ def suite():
     suite.addTest(unittest.TestLoader().loadTestsFromTestCase(Test))
     suite.addTest(unittest.TestLoader().loadTestsFromTestCase(RemoteTest))
     suite.addTest(KeySuite())
-    suite.addTest(StoreSuite())
-    suite.addTest(RemoteStoreSuite())
+    suite.addTest(RepositorySuite())
+    suite.addTest(RemoteRepositorySuite())
     suite.addTest(doctest.DocTestSuite(helpers))
     suite.addTest(lrucache.suite())
     suite.addTest(crypto.suite())

docs/index.rst (+8 -9)

@@ -10,16 +10,16 @@ Space efficient storage
    Variable block size `deduplication <http://en.wikipedia.org/wiki/Data_deduplication>`_
    is used to reduce the number of bytes stored by detecting redundant data.
    Each file is split into a number of variable length chunks and only chunks
-   that have never been seen before are added to the store.
+   that have never been seen before are compressed and added to the repository.
 
 Secure
     All data is encrypted using `AES256 <http://en.wikipedia.org/wiki/Advanced_Encryption_Standard>`_
     and the data integrity and authenticity is verified using
     `HMAC-SHA256 <http://en.wikipedia.org/wiki/HMAC>`_.
 
-Remote stores
+Remote repositories
     Darc can store data on remote hosts over SSH as long as Darc is installed on
-    the remote host. The following syntax is used to specify a remote store::
+    the remote host. The following syntax is used to specify a remote repository::
 
     $ darc list hostname:path
     $ darc extract hostname:path::archive-name
@@ -36,8 +36,8 @@ Archive
     A Darc archive is a collection of files along with metadata that include file
     permissions, directory structure and various file attributes.
 
-Store
-    A Darc store is a filesystem directory storing data from zero or more archives.
+Repository
+    A Darc repository is a filesystem directory storing data from zero or more archives.
     The data in a store is both deduplicated and encrypted making it both 
     efficient and safe.
 
@@ -49,10 +49,9 @@ Key file
 
 Requirements
 ------------
-* Python >= 2.5
-* pycrypto
+* Python >= 3.2
 * msgpack-python
-* paramiko (for remote store support)
+* pyxattr
 
 
 Installation
@@ -89,7 +88,7 @@ Initializing a store
 Before the first archive can be created a store needs to be initialized::
 
     $ darc init /data/my-backup.darc
-    Initializing store "/data/my-backup.darc"
+    Initializing repository at "/data/my-backup.darc"
     Key file password (Leave blank for no password): *****
     Key file password again: *****
     Key file "/home/YOU/.darc/keys/data_my_backup_darc" created.
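
The deduplication paragraph above is the core of the design. As a hypothetical toy illustration of content-addressed chunk storage (not darc's actual chunker, which picks chunk boundaries with a rolling buzhash):

    import hashlib

    def store_chunks(index, chunks):
        # Chunks are keyed by a hash of their content, so a chunk that has
        # been seen before is stored only once.
        for chunk in chunks:
            key = hashlib.sha256(chunk).digest()
            if key not in index:
                index[key] = chunk
        return index

    index = store_chunks({}, [b'aaa', b'bbb', b'aaa'])
    assert len(index) == 2  # the duplicate chunk is stored once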