Browse Source

Remove logging usage

Jonas Borgström 14 years ago
parent
commit
b5275ca526
6 changed files with 159 additions and 175 deletions
  1. 62 127
      darc/archive.py
  2. 78 21
      darc/archiver.py
  3. 1 3
      darc/cache.py
  4. 8 9
      darc/crypto.py
  5. 0 14
      darc/helpers.py
  6. 10 1
      darc/test.py

+ 62 - 127
darc/archive.py

@@ -1,6 +1,5 @@
 from datetime import datetime
 from getpass import getuser
-import logging
 import msgpack
 import os
 import socket
@@ -9,8 +8,7 @@ import sys
 
 from . import NS_ARCHIVE_METADATA, NS_ARCHIVE_ITEMS, NS_ARCHIVE_CHUNKS, NS_CHUNK
 from .chunkifier import chunkify
-from .helpers import uid2user, user2uid, gid2group, group2gid, \
-    IntegrityError, format_file_mode, format_time
+from .helpers import uid2user, user2uid, gid2group, group2gid, IntegrityError
 
 CHUNK_SIZE = 55001
 
@@ -19,6 +17,9 @@ have_lchmod = hasattr(os, 'lchmod')
 
 class Archive(object):
 
+    class DoesNotExist(Exception):
+        pass
+
     def __init__(self, store, crypto, name=None):
         self.crypto = crypto
         self.store = store
@@ -31,7 +32,10 @@ class Archive(object):
 
     def load(self, id):
         self.id = id
-        data, self.hash = self.crypto.decrypt(self.store.get(NS_ARCHIVE_METADATA, self.id))
+        try:
+            data, self.hash = self.crypto.decrypt(self.store.get(NS_ARCHIVE_METADATA, self.id))
+        except self.store.DoesNotExist:
+            raise self.DoesNotExist
         self.metadata = msgpack.unpackb(data)
         assert self.metadata['version'] == 1
 
@@ -84,7 +88,7 @@ class Archive(object):
         self.get_items()
         osize = csize = usize = 0
         for item in self.items:
-            if item['type'] == 'FILE':
+            if stat.S_ISREG(item['mode']) and not 'source' in item:
                 osize += item['size']
         for id, size in self.chunks:
             csize += size
@@ -92,51 +96,33 @@ class Archive(object):
                 usize += size
         return osize, csize, usize
 
-    def list(self):
-        tmap = dict(FILE='-', DIRECTORY='d', SYMLINK='l')
-        self.get_items()
-        for item in self.items:
-            type = tmap[item['type']]
-            mode = format_file_mode(item['mode'])
-            size = item.get('size', 0)
-            mtime = format_time(datetime.fromtimestamp(item['mtime']))
-            print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
-                                              item['group'], size, mtime, item['path'])
-
-    def extract(self, dest=None):
-        self.get_items()
+    def extract_item(self, item, dest=None):
         dest = dest or os.getcwdu()
         dir_stat_queue = []
-        for item in self.items:
-            assert item['path'][0] not in ('/', '\\', ':')
-            path = os.path.join(dest, item['path'].decode('utf-8'))
-            if item['type'] == 'DIRECTORY':
-                logging.info(path)
-                if not os.path.exists(path):
-                    os.makedirs(path)
-                dir_stat_queue.append((path, item))
-                continue
-            elif item['type'] == 'SYMLINK':
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
-                source = item['source']
-                logging.info('%s -> %s', path, source)
-                if os.path.exists(path):
-                    os.unlink(path)
-                os.symlink(source, path)
-                self.restore_stat(path, item, symlink=True)
-            elif item['type'] == 'HARDLINK':
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
+        assert item['path'][0] not in ('/', '\\', ':')
+        path = os.path.join(dest, item['path'].decode('utf-8'))
+        mode = item['mode']
+        if stat.S_ISDIR(mode):
+            if not os.path.exists(path):
+                os.makedirs(path)
+        elif stat.S_ISLNK(mode):
+            if not os.path.exists(os.path.dirname(path)):
+                os.makedirs(os.path.dirname(path))
+            source = item['source']
+            if os.path.exists(path):
+                os.unlink(path)
+            os.symlink(source, path)
+            self.restore_attrs(path, item, symlink=True)
+        elif stat.S_ISREG(mode):
+            if not os.path.exists(os.path.dirname(path)):
+                os.makedirs(os.path.dirname(path))
+            # Hard link?
+            if 'source' in item:
                 source = os.path.join(dest, item['source'])
-                logging.info('%s => %s', path, source)
                 if os.path.exists(path):
                     os.unlink(path)
                 os.link(source, path)
-            elif item['type'] == 'FILE':
-                logging.info(path)
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
+            else:
                 with open(path, 'wb') as fd:
                     for chunk in item['chunks']:
                         id = self.chunk_idx[chunk]
@@ -147,13 +133,11 @@ class Archive(object):
                             fd.write(data)
                         except ValueError:
                             raise Exception('Invalid chunk checksum')
-                self.restore_stat(path, item)
-            else:
-                raise Exception('Unknown archive item type %r' % item['type'])
-            if dir_stat_queue and not path.startswith(dir_stat_queue[-1][0]):
-                self.restore_stat(*dir_stat_queue.pop())
+                self.restore_attrs(path, item)
+        else:
+            raise Exception('Unknown archive item mode %r' % item['mode'])
 
-    def restore_stat(self, path, item, symlink=False):
+    def restore_attrs(self, path, item, symlink=False):
         if have_lchmod:
             os.lchmod(path, item['mode'])
         elif not symlink:
@@ -168,22 +152,16 @@ class Archive(object):
             # FIXME: We should really call futimes here (c extension required)
             os.utime(path, (item['ctime'], item['mtime']))
 
-    def verify(self):
-        self.get_items()
-        for item in self.items:
-            if item['type'] == 'FILE':
-                item['path'] = item['path'].decode('utf-8')
-                for chunk in item['chunks']:
-                    id = self.chunk_idx[chunk]
-                    try:
-                        data, hash = self.crypto.decrypt(self.store.get(NS_CHUNK, id))
-                        if self.crypto.id_hash(data) != id:
-                            raise IntegrityError('chunk id did not match')
-                    except IntegrityError:
-                        logging.error('%s ... ERROR', item['path'])
-                        break
-                else:
-                    logging.info('%s ... OK', item['path'])
+    def verify_file(self, item):
+        for chunk in item['chunks']:
+            id = self.chunk_idx[chunk]
+            try:
+                data, hash = self.crypto.decrypt(self.store.get(NS_CHUNK, id))
+                if self.crypto.id_hash(data) != id:
+                    raise IntegrityError('chunk id did not match')
+            except IntegrityError:
+                return False
+        return True
 
     def delete(self, cache):
         self.get_items()
@@ -195,70 +173,35 @@ class Archive(object):
         self.store.commit()
         cache.save()
 
-    def _walk(self, path):
-        st = os.lstat(path)
-        yield path, st
-        if stat.S_ISDIR(st.st_mode):
-            for f in os.listdir(path):
-                for x in self._walk(os.path.join(path, f)):
-                    yield x
-
-    def create(self, name, paths, cache):
-        id = self.crypto.id_hash(name)
-        try:
-            self.store.get(NS_ARCHIVE_METADATA, id)
-        except self.store.DoesNotExist:
-            pass
-        else:
-            raise NameError('Archive already exists')
-        for path in paths:
-            for path, st in self._walk(unicode(path)):
-                if stat.S_ISDIR(st.st_mode):
-                    self.process_dir(path, st)
-                elif stat.S_ISLNK(st.st_mode):
-                    self.process_symlink(path, st)
-                elif stat.S_ISREG(st.st_mode):
-                    self.process_file(path, st, cache)
-                else:
-                    logging.error('Unknown file type: %s', path)
-        self.save(name)
-        cache.save()
-
-    def process_dir(self, path, st):
-        path = path.lstrip('/\\:')
-        logging.info(path)
-        self.items.append({
-            'type': 'DIRECTORY', 'path': path,
+    def stat_attrs(self, st):
+        return {
             'mode': st.st_mode,
             'uid': st.st_uid, 'user': uid2user(st.st_uid),
             'gid': st.st_gid, 'group': gid2group(st.st_gid),
             'ctime': st.st_ctime, 'mtime': st.st_mtime,
-        })
+        }
+
+    def process_dir(self, path, st):
+        item = {'path': path.lstrip('/\\:')}
+        item.update(self.stat_attrs(st))
+        self.items.append(item)
 
     def process_symlink(self, path, st):
         source = os.readlink(path)
-        path = path.lstrip('/\\:')
-        logging.info('%s -> %s', path, source)
-        self.items.append({
-            'type': 'SYMLINK', 'path': path, 'source': source,
-            'mode': st.st_mode,
-            'uid': st.st_uid, 'user': uid2user(st.st_uid),
-            'gid': st.st_gid, 'group': gid2group(st.st_gid),
-            'ctime': st.st_ctime, 'mtime': st.st_mtime,
-        })
+        item = {'path': path.lstrip('/\\:'), 'source': source}
+        item.update(self.stat_attrs(st))
+        self.items.append(item)
+
     def process_file(self, path, st, cache):
         safe_path = path.lstrip('/\\:')
         # Is it a hard link?
         if st.st_nlink > 1:
             source = self.hard_links.get((st.st_ino, st.st_dev))
             if (st.st_ino, st.st_dev) in self.hard_links:
-                logging.info('%s => %s', path, source)
-                self.items.append({ 'type': 'HARDLINK',
-                                    'path': path, 'source': source})
+                self.items.append({'path': path, 'source': source})
                 return
             else:
                 self.hard_links[st.st_ino, st.st_dev] = safe_path
-        logging.info(safe_path)
         path_hash = self.crypto.id_hash(path.encode('utf-8'))
         ids, size = cache.file_known_and_unchanged(path_hash, st)
         if ids is not None:
@@ -271,12 +214,8 @@ class Archive(object):
                 chunks = [self.process_chunk2(id, cache) for id in ids]
         # Only chunkify the file if needed
         if ids is None:
-            try:
-                fd = open(path, 'rb')
-            except IOError, e:
-                logging.error(e)
-                return
-            with fd:
+            with open(path, 'rb') as fd:
                 size = 0
                 ids = []
                 chunks = []
@@ -289,13 +228,9 @@ class Archive(object):
                         chunks.append(self.process_chunk(id, chunk, cache))
                     size += len(chunk)
             cache.memorize_file_chunks(path_hash, st, ids)
-        self.items.append({
-            'type': 'FILE', 'path': safe_path, 'chunks': chunks, 'size': size,
-            'mode': st.st_mode,
-            'uid': st.st_uid, 'user': uid2user(st.st_uid),
-            'gid': st.st_gid, 'group': gid2group(st.st_gid),
-            'ctime': st.st_ctime, 'mtime': st.st_mtime,
-        })
+        item = {'path': safe_path, 'chunks': chunks, 'size': size}
+        item.update(self.stat_attrs(st))
+        self.items.append(item)
 
     def process_chunk2(self, id, cache):
         try:

+ 78 - 21
darc/archiver.py

@@ -1,43 +1,89 @@
 import argparse
-import logging
+from datetime import datetime
 import os
+import stat
 import sys
 
 from .archive import Archive
 from .store import Store
 from .cache import Cache
 from .crypto import CryptoManager, KeyChain
-from .helpers import location_validator, format_file_size, LevelFilter
+from .helpers import location_validator, format_file_size, format_time, format_file_mode
 
 
 class Archiver(object):
 
+    def __init__(self):
+        self.exit_code = 0
+
     def open_store(self, location):
         return Store(location.path)
 
-    def exit_code_from_logger(self):
-        return 1 if self.level_filter.count.get('ERROR') else 0
+    def print_error(self, msg, *args):
+        msg = msg % args if args else msg
+        self.exit_code = 1
+        print >> sys.stderr, msg
+
+    def print_verbose(self, msg, *args, **kw):
+        if self.verbose:
+            msg = msg % args if args else msg
+            if kw.get('newline', True):
+                print msg
+            else:
+                print msg,
+
+    def _walk(self, path):
+        st = os.lstat(path)
+        yield path, st
+        if stat.S_ISDIR(st.st_mode):
+            for f in os.listdir(path):
+                for x in self._walk(os.path.join(path, f)):
+                    yield x
 
     def do_init(self, args):
         Store(args.store.path, create=True)
-        return self.exit_code_from_logger()
+        return self.exit_code
 
     def do_create(self, args):
         store = self.open_store(args.archive)
         keychain = KeyChain(args.keychain)
         crypto = CryptoManager(keychain)
+        try:
+            Archive(store, crypto, args.archive.archive)
+        except Archive.DoesNotExist:
+            pass
+        else:
+            self.print_error('Archive already exists')
+            return self.exit_code
         archive = Archive(store, crypto)
         cache = Cache(store, archive.crypto)
-        archive.create(args.archive.archive, args.paths, cache)
-        return self.exit_code_from_logger()
+        for path in args.paths:
+            for path, st in self._walk(unicode(path)):
+                if stat.S_ISDIR(st.st_mode):
+                    archive.process_dir(path, st)
+                elif stat.S_ISLNK(st.st_mode):
+                    archive.process_symlink(path, st)
+                elif stat.S_ISREG(st.st_mode):
+                    try:
+                        archive.process_file(path, st, cache)
+                    except IOError, e:
+                        self.print_error('%s: %s', path, e)
+                else:
+                    self.print_error('Unknown file type: %s', path)
+        archive.save(args.archive.archive)
+        cache.save()
+        return self.exit_code
 
     def do_extract(self, args):
         store = self.open_store(args.archive)
         keychain = KeyChain(args.keychain)
         crypto = CryptoManager(keychain)
         archive = Archive(store, crypto, args.archive.archive)
-        archive.extract(args.dest)
-        return self.exit_code_from_logger()
+        archive.get_items()
+        for item in archive.items:
+            self.print_verbose(item['path'])
+            archive.extract_item(item, args.dest)
+        return self.exit_code
 
     def do_delete(self, args):
         store = self.open_store(args.archive)
@@ -46,27 +92,43 @@ class Archiver(object):
         archive = Archive(store, crypto, args.archive.archive)
         cache = Cache(store, archive.crypto)
         archive.delete(cache)
-        return self.exit_code_from_logger()
+        return self.exit_code
 
     def do_list(self, args):
         store = self.open_store(args.src)
         keychain = KeyChain(args.keychain)
         crypto = CryptoManager(keychain)
         if args.src.archive:
+            tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 010: '-', 012: 'l', 014: 's'}
             archive = Archive(store, crypto, args.src.archive)
-            archive.list()
+            archive.get_items()
+            for item in archive.items:
+                type = tmap.get(item['mode'] / 4096, '?')
+                mode = format_file_mode(item['mode'])
+                size = item.get('size', 0)
+                mtime = format_time(datetime.fromtimestamp(item['mtime']))
+                print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
+                                                  item['group'], size, mtime, item['path'])
         else:
             for archive in Archive.list_archives(store, crypto):
                 print '%(name)-20s %(time)s' % archive.metadata
-        return self.exit_code_from_logger()
+        return self.exit_code
 
     def do_verify(self, args):
         store = self.open_store(args.archive)
         keychain = KeyChain(args.keychain)
         crypto = CryptoManager(keychain)
         archive = Archive(store, crypto, args.archive.archive)
-        archive.verify()
-        return self.exit_code_from_logger()
+        archive.get_items()
+        for item in archive.items:
+            if stat.S_ISREG(item['mode']) and not 'source' in item:
+                self.print_verbose('%s ...', item['path'], newline=False)
+                if archive.verify_file(item):
+                    self.print_verbose('OK')
+                else:
+                    self.print_verbose('ERROR')
+                    self.print_error('%s: verification failed' % item['path'])
+        return self.exit_code
 
     def do_info(self, args):
         store = self.open_store(args.archive)
@@ -84,7 +146,7 @@ class Archiver(object):
         print 'Original size:', format_file_size(osize)
         print 'Compressed size:', format_file_size(csize)
         print 'Unique data:', format_file_size(usize)
-        return self.exit_code_from_logger()
+        return self.exit_code
 
     def do_keychain_generate(self, args):
         return KeyChain.generate(args.keychain)
@@ -165,12 +227,7 @@ class Archiver(object):
                                help='Archive to display information about')
 
         args = parser.parse_args(args)
-        if args.verbose:
-            logging.basicConfig(level=logging.INFO, format='%(message)s')
-        else:
-            logging.basicConfig(level=logging.WARNING, format='%(message)s')
-        self.level_filter = LevelFilter()
-        logging.getLogger('').addFilter(self.level_filter)
+        self.verbose = args.verbose
         return args.func(args)
 
 def main():

+ 1 - 3
darc/cache.py

@@ -1,5 +1,3 @@
-from itertools import ifilter
-import logging
 import msgpack
 import os
 
@@ -34,7 +32,7 @@ class Cache(object):
     def init(self):
         """Initializes cache by fetching and reading all archive indicies
         """
-        logging.info('Initializing cache...')
+        print 'Initializing cache...'
         self.chunk_counts = {}
         self.file_chunks = {}
         self.tid = self.store.tid

+ 8 - 9
darc/crypto.py

@@ -1,7 +1,6 @@
 from getpass import getpass
 import hashlib
 import os
-import logging
 import msgpack
 import zlib
 
@@ -26,7 +25,7 @@ class KeyChain(object):
             self.open(path)
 
     def open(self, path):
-        logging.info('Opening keychain "%s"', path)
+        print 'Opening keychain "%s"' % path
         with open(path, 'rb') as fd:
             if fd.read(len(self.FILE_ID)) != self.FILE_ID:
                 raise ValueError('Not a keychain')
@@ -38,7 +37,7 @@ class KeyChain(object):
                 raise Exception('Keychain decryption failed')
             data = self.decrypt(cdata, self.password)
             if not data:
-                logging.error('Incorrect password')
+                print 'Incorrect password'
         chain = msgpack.unpackb(data)
         assert chain['version'] == 1
         self.aes_id = chain['aes_id']
@@ -82,11 +81,11 @@ class KeyChain(object):
         with open(path, 'wb') as fd:
             fd.write(self.FILE_ID)
             fd.write(data)
-            logging.info('Key chain "%s" saved', path)
+            print 'Key chain "%s" saved' % path
 
     def restrict(self, path):
         if os.path.exists(path):
-            logging.error('%s already exists', path)
+            print '%s already exists' % path
             return 1
         self.rsa_read = self.rsa_read.publickey()
         self.save(path, self.password)
@@ -98,23 +97,23 @@ class KeyChain(object):
             password = getpass('New password: ')
             password2 = getpass('New password again: ')
             if password != password2:
-                logging.error('Passwords do not match')
+                print 'Passwords do not match'
         self.save(self.path, password)
         return 0
 
     @staticmethod
     def generate(path):
         if os.path.exists(path):
-            logging.error('%s already exists', path)
+            print '%s already exists' % path
             return 1
         password, password2 = 1, 2
         while password != password2:
             password = getpass('Keychain password: ')
             password2 = getpass('Keychain password again: ')
             if password != password2:
-                logging.error('Passwords do not match')
+                print 'Passwords do not match'
         chain = KeyChain()
-        logging.info('Generating keys')
+        print 'Generating keychain'
         chain.aes_id = os.urandom(32)
         chain.rsa_read = RSA.generate(2048)
         chain.rsa_create = RSA.generate(2048)

+ 0 - 14
darc/helpers.py

@@ -1,7 +1,6 @@
 import argparse
 from datetime import datetime
 import grp
-import logging
 import pwd
 import re
 
@@ -79,19 +78,6 @@ def group2gid(group):
         return None
 
 
-class LevelFilter(logging.Filter):
-    """Filter that counts record levels
-    """
-    def __init__(self, *args, **kwargs):
-        logging.Filter.__init__(self, *args, **kwargs)
-        self.count = {}
-
-    def filter(self, record):
-        self.count.setdefault(record.levelname, 0)
-        self.count[record.levelname] += 1
-        return record
-
-
 class Location(object):
 
     loc_re = re.compile(r'^((?:(?P<user>[^@]+)@)?(?P<host>[^:]+):)?'

+ 10 - 1
darc/test.py

@@ -1,4 +1,6 @@
 import os
+from StringIO import StringIO
+import sys
 import shutil
 import tempfile
 import unittest
@@ -24,7 +26,14 @@ class Test(unittest.TestCase):
     def darc(self, *args, **kwargs):
         exit_code = kwargs.get('exit_code', 0)
         args = ['--keychain', self.keychain] + list(args)
-        self.assertEqual(exit_code, self.archiver.run(args))
+        try:
+            stdout, stderr = sys.stdout, sys.stderr
+            output = StringIO()
+            sys.stdout = sys.stderr = output
+            self.assertEqual(exit_code, self.archiver.run(args))
+            return output.getvalue()
+        finally:
+            sys.stdout, sys.stderr = stdout, stderr
 
     def create_src_archive(self, name):
         src_dir = os.path.join(os.getcwd(), os.path.dirname(__file__))