Browse source

Merge branch 'logging-refactor' into upstream

Antoine Beaupré 9 years ago
parent
commit
eacb0b9e83
10 changed files with 271 additions and 85 deletions
  1. borg/archive.py (+31 -5)
  2. borg/archiver.py (+27 -35)
  3. borg/cache.py (+28 -9)
  4. borg/helpers.py (+16 -10)
  5. borg/key.py (+12 -9)
  6. borg/logger.py (+91 -0)
  7. borg/repository.py (+5 -2)
  8. borg/testsuite/logger.py (+44 -0)
  9. borg/testsuite/upgrader.py (+0 -2)
  10. borg/upgrader.py (+17 -13)

borg/archive.py (+31 -5)

@@ -1,7 +1,12 @@
+from binascii import hexlify
 from datetime import datetime
 from getpass import getuser
 from itertools import groupby
 import errno
+import logging
+
+from .logger import create_logger
+logger = create_logger()
 from .key import key_factory
 from .remote import cache_if_remote

@@ -12,7 +17,7 @@ import sys
 import time
 from io import BytesIO
 from . import xattr
-from .helpers import parse_timestamp, Error, uid2user, user2uid, gid2group, group2gid, \
+from .helpers import parse_timestamp, Error, uid2user, user2uid, gid2group, group2gid, format_timedelta, \
     Manifest, Statistics, decode_dict, st_mtime_ns, make_path_safe, StableDict, int_to_bigint, bigint_to_int, have_cython
 if have_cython():
     from .platform import acl_get, acl_set

@@ -132,7 +137,8 @@ class Archive:

     def __init__(self, repository, key, manifest, name, cache=None, create=False,
                  checkpoint_interval=300, numeric_owner=False, progress=False,
-                 chunker_params=CHUNKER_PARAMS):
+                 chunker_params=CHUNKER_PARAMS,
+                 start=datetime.now(), end=datetime.now()):
         self.cwd = os.getcwd()
         self.key = key
         self.repository = repository

@@ -145,6 +151,8 @@ class Archive:
         self.name = name
         self.checkpoint_interval = checkpoint_interval
         self.numeric_owner = numeric_owner
+        self.start = start
+        self.end = end
         self.pipeline = DownloadPipeline(self.repository, self.key)
         if create:
             self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats, chunker_params)

@@ -184,6 +192,24 @@ class Archive:
         """Timestamp of archive creation in UTC"""
         return parse_timestamp(self.metadata[b'time'])

+    @property
+    def fpr(self):
+        return hexlify(self.id).decode('ascii')
+
+    @property
+    def duration(self):
+        return format_timedelta(self.end-self.start)
+
+    def __str__(self):
+        buf = '''Archive name: {0.name}
+Archive fingerprint: {0.fpr}
+Start time: {0.start:%c}
+End time: {0.end:%c}
+Duration: {0.duration}
+Number of files: {0.stats.nfiles}
+{0.cache}'''.format(self)
+        return buf
+
     def __repr__(self):
         return 'Archive(%r)' % self.name

@@ -633,7 +659,7 @@ class ArchiveChecker:
         self.orphan_chunks_check()
         self.finish()
         if not self.error_found:
-            self.report_progress('Archive consistency check complete, no problems found.')
+            logger.info('Archive consistency check complete, no problems found.')
         return self.repair or not self.error_found

     def init_chunks(self):

@@ -655,7 +681,7 @@ class ArchiveChecker:
     def report_progress(self, msg, error=False):
         if error:
             self.error_found = True
-        print(msg, file=sys.stderr if error else sys.stdout)
+        logger.log(logging.ERROR if error else logging.WARNING, msg)

     def identify_key(self, repository):
         cdata = repository.get(next(self.chunks.iteritems())[0])

@@ -782,7 +808,7 @@ class ArchiveChecker:
             num_archives = 1
             end = 1
         for i, (name, info) in enumerate(archive_items[:end]):
-            self.report_progress('Analyzing archive {} ({}/{})'.format(name, num_archives - i, num_archives))
+            logger.info('Analyzing archive {} ({}/{})'.format(name, num_archives - i, num_archives))
             archive_id = info[b'id']
             if archive_id not in self.chunks:
                 self.report_progress('Archive metadata block is missing', error=True)

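The `__str__` added above leans on a detail of `str.format`: the archive itself is passed as positional argument 0, so the template can dereference attributes (`{0.name}`) and hand per-field format specs through to them (`{0.start:%c}` is forwarded to `datetime.__format__`, i.e. `strftime('%c')`). A minimal standalone sketch of that technique, using a hypothetical `Report` class that is not part of this commit:

    from datetime import datetime

    class Report:
        def __init__(self, name):
            self.name = name             # plain attribute, read via {0.name}
            self.start = datetime.now()  # format spec %c reaches datetime.__format__

    # argument 0 is the object; attribute access and format specs compose
    print('Archive name: {0.name}\nStart time: {0.start:%c}'.format(Report('docs')))
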
borg/archiver.py (+27 -35)

@@ -20,6 +20,8 @@ from .helpers import Error, location_validator, format_time, format_file_size, \
     get_cache_dir, get_keys_dir, format_timedelta, prune_within, prune_split, \
     Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
     is_cachedir, bigint_to_int, ChunkerParams, CompressionSpec, have_cython
+from .logger import create_logger, setup_logging
+logger = create_logger()
 if have_cython():
     from .compress import Compressor, COMPR_BUFFER
     from .upgrader import AtticRepositoryUpgrader

@@ -48,15 +50,11 @@ class Archiver:
     def print_error(self, msg, *args):
         msg = args and msg % args or msg
         self.exit_code = 1
-        print('borg: ' + msg, file=sys.stderr)
+        logger.error('borg: ' + msg)

     def print_verbose(self, msg, *args, **kw):
-        if self.verbose:
-            msg = args and msg % args or msg
-            if kw.get('newline', True):
-                print(msg)
-            else:
-                print(msg, end=' ')
+        msg = args and msg % args or msg
+        logger.info(msg)

     def do_serve(self, args):
         """Start in server mode. This command is usually not used manually.

@@ -65,7 +63,7 @@ class Archiver:

     def do_init(self, args):
         """Initialize an empty repository"""
-        print('Initializing repository at "%s"' % args.repository.orig)
+        logger.info('Initializing repository at "%s"' % args.repository.orig)
         repository = self.open_repository(args.repository, create=True, exclusive=True)
         key = key_creator(repository, args)
         manifest = Manifest(key, repository)

@@ -87,9 +85,9 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
                 if input('Do you want to continue? ') == 'Yes I am sure':
                     break
         if not args.archives_only:
-            print('Starting repository check...')
+            logger.warning('Starting repository check...')
             if repository.check(repair=args.repair):
-                print('Repository check complete, no problems found.')
+                logger.info('Repository check complete, no problems found.')
             else:
                 return 1
         if not args.repo_only and not ArchiveChecker().check(

@@ -118,7 +116,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
             archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                               create=True, checkpoint_interval=args.checkpoint_interval,
                               numeric_owner=args.numeric_owner, progress=args.progress,
-                              chunker_params=args.chunker_params)
+                              chunker_params=args.chunker_params, start=t0)
         else:
             archive = cache = None
         # Add cache dir to inode_skip list

@@ -163,16 +161,10 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
             if args.progress:
                 archive.stats.show_progress(final=True)
             if args.stats:
-                t = datetime.now()
-                diff = t - t0
+                archive.end = datetime.now()
                 print('-' * 78)
-                print('Archive name: %s' % args.archive.archive)
-                print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
-                print('Start time: %s' % t0.strftime('%c'))
-                print('End time: %s' % t.strftime('%c'))
-                print('Duration: %s' % format_timedelta(diff))
-                print('Number of files: %d' % archive.stats.nfiles)
-                archive.stats.print_('This archive:', cache)
+                print(str(archive))
+                print(archive.stats.print_('This archive:', cache))
                 print('-' * 78)
         return self.exit_code

@@ -252,7 +244,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         """Extract archive contents"""
         # be restrictive when restoring files, restore permissions later
         if sys.getfilesystemencoding() == 'ascii':
-            print('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
+            logger.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
         repository = self.open_repository(args.archive)
         manifest, key = Manifest.load(repository)
         archive = Archive(repository, key, manifest, args.archive.archive,

@@ -318,21 +310,22 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
             repository.commit()
             cache.commit()
             if args.stats:
-                stats.print_('Deleted data:', cache)
+                logger.info(stats.print_('Deleted data:', cache))
         else:
             if not args.cache_only:
-                print("You requested to completely DELETE the repository *including* all archives it contains:")
+                print("You requested to completely DELETE the repository *including* all archives it contains:", file=sys.stderr)
                 for archive_info in manifest.list_archive_infos(sort_by='ts'):
-                    print(format_archive(archive_info))
+                    print(format_archive(archive_info), file=sys.stderr)
                 if not os.environ.get('BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
-                    print("""Type "YES" if you understand this and want to continue.\n""")
+                    print("""Type "YES" if you understand this and want to continue.\n""", file=sys.stderr)
+                    # XXX: prompt may end up on stdout, but we'll assume that input() does the right thing
                     if input('Do you want to continue? ') != 'YES':
                         self.exit_code = 1
                         return self.exit_code
                 repository.destroy()
-                print("Repository deleted.")
+                logger.info("Repository deleted.")
             cache.destroy()
-            print("Cache deleted.")
+            logger.info("Cache deleted.")
         return self.exit_code

     def do_mount(self, args):

@@ -418,7 +411,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         print('Time: %s' % to_localtime(archive.ts).strftime('%c'))
         print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
         print('Number of files: %d' % stats.nfiles)
-        stats.print_('This archive:', cache)
+        print(stats.print_('This archive:', cache))
         return self.exit_code

     def do_prune(self, args):

@@ -463,7 +456,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
             repository.commit()
             cache.commit()
         if args.stats:
-            stats.print_('Deleted data:', cache)
+            logger.info(stats.print_('Deleted data:', cache))
         return self.exit_code

     def do_upgrade(self, args):

@@ -551,9 +544,8 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")

     def build_parser(self, args=None, prog=None):
         common_parser = argparse.ArgumentParser(add_help=False, prog=prog)
-        common_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
-                                   default=False,
-                                   help='verbose output')
+        common_parser.add_argument('-v', '--verbose', dest='verbose', action='count',
+                                   help='verbose output, defaults to warnings only')
         common_parser.add_argument('--no-files-cache', dest='cache_files', action='store_false',
                                    help='do not load/update the file metadata cache used to detect unchanged files')
         common_parser.add_argument('--umask', dest='umask', type=lambda s: int(s, 8), default=RemoteRepository.umask, metavar='M',

@@ -983,7 +975,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         parser = self.build_parser(args)

         args = parser.parse_args(args or ['-h'])
-        self.verbose = args.verbose
+        setup_logging(args)
         os.umask(args.umask)
         RemoteRepository.remote_path = args.remote_path
         RemoteRepository.umask = args.umask

@@ -1002,7 +994,7 @@ def sig_info_handler(signum, stack):  # pragma: no cover
                 total = loc['st'].st_size
             except Exception:
                 pos, total = 0, 0
-            print("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
+            logger.warning("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
             break
         if func in ('extract_item', ):  # extract op
             path = loc['item'][b'path']

@@ -1010,7 +1002,7 @@ def sig_info_handler(signum, stack):  # pragma: no cover
                 pos = loc['fd'].tell()
             except Exception:
                 pos = 0
-            print("{0} {1}/???".format(path, format_file_size(pos)))
+            logger.warning("{0} {1}/???".format(path, format_file_size(pos)))
             break

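`-v` changes from a boolean to a counted flag here: argparse's `action='count'` leaves `args.verbose` at `None` when the flag is absent, `1` for `-v` and `2` for `-vv`, and `setup_logging()` (see `borg/logger.py` below) maps those onto logging levels. A sketch of that pattern, assuming the same argument names as the diff:

    import argparse
    import logging

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='count',
                        help='verbose output, defaults to warnings only')
    args = parser.parse_args(['-vv'])

    # default to WARNING, -v goes to INFO and -vv to DEBUG
    levels = {None: logging.WARNING, 0: logging.WARNING,
              1: logging.INFO, 2: logging.DEBUG}
    logging.getLogger('').setLevel(levels[args.verbose])  # -vv -> DEBUG
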
borg/cache.py (+28 -9)

@@ -1,5 +1,6 @@
 import configparser
 from .remote import cache_if_remote
+from collections import namedtuple
 import errno
 import os
 import stat

@@ -10,8 +11,10 @@ import tarfile
 import tempfile

 from .key import PlaintextKey
+from .logger import create_logger
+logger = create_logger()
 from .helpers import Error, get_cache_dir, decode_dict, st_mtime_ns, unhexlify, int_to_bigint, \
-    bigint_to_int, have_cython
+    bigint_to_int, format_file_size, have_cython
 from .locking import UpgradableLock
 from .hashindex import ChunkIndex

@@ -72,10 +75,26 @@ class Cache:
     def __del__(self):
         self.close()

+    def __str__(self):
+        return format(self, """\
+All archives:   {0.total_size:>20s} {0.total_csize:>20s} {0.unique_csize:>20s}
+
+                       Unique chunks         Total chunks
+Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}""")
+
+    def __format__(self, format_spec):
+        # XXX: this should really be moved down to `hashindex.pyx`
+        Summary = namedtuple('Summary', ['total_size', 'total_csize', 'unique_size', 'unique_csize', 'total_unique_chunks', 'total_chunks'])
+        stats = Summary(*self.chunks.summarize())._asdict()
+        for field in ['total_size', 'total_csize', 'unique_csize']:
+            stats[field] = format_file_size(stats[field])
+        stats = Summary(**stats)
+        return format_spec.format(stats)
+
     def _confirm(self, message, env_var_override=None):
         print(message, file=sys.stderr)
         if env_var_override and os.environ.get(env_var_override):
-            print("Yes (From {})".format(env_var_override))
+            print("Yes (From {})".format(env_var_override), file=sys.stderr)
             return True
         if not sys.stdin.isatty():
             return False

@@ -265,7 +284,7 @@ class Cache:
                 unpacker.feed(data)
                 for item in unpacker:
                     if not isinstance(item, dict):
-                        print('Error: Did not get expected metadata dict - archive corrupted!')
+                        logger.error('Error: Did not get expected metadata dict - archive corrupted!')
                         continue
                     if b'chunks' in item:
                         for chunk_id, size, csize in item[b'chunks']:

@@ -287,10 +306,10 @@ class Cache:
                     return name

         def create_master_idx(chunk_idx):
-            print('Synchronizing chunks cache...')
+            logger.info('Synchronizing chunks cache...')
             cached_ids = cached_archives()
             archive_ids = repo_archives()
-            print('Archives: %d, w/ cached Idx: %d, w/ outdated Idx: %d, w/o cached Idx: %d.' % (
+            logger.info('Archives: %d, w/ cached Idx: %d, w/ outdated Idx: %d, w/o cached Idx: %d.' % (
                 len(archive_ids), len(cached_ids),
                 len(cached_ids - archive_ids), len(archive_ids - cached_ids), ))
             # deallocates old hashindex, creates empty hashindex:

@@ -302,12 +321,12 @@ class Cache:
                     archive_name = lookup_name(archive_id)
                     if archive_id in cached_ids:
                         archive_chunk_idx_path = mkpath(archive_id)
-                        print("Reading cached archive chunk index for %s ..." % archive_name)
+                        logger.info("Reading cached archive chunk index for %s ..." % archive_name)
                         archive_chunk_idx = ChunkIndex.read(archive_chunk_idx_path)
                     else:
-                        print('Fetching and building archive index for %s ...' % archive_name)
+                        logger.info('Fetching and building archive index for %s ...' % archive_name)
                         archive_chunk_idx = fetch_and_build_idx(archive_id, repository, self.key)
-                    print("Merging into master chunks index ...")
+                    logger.info("Merging into master chunks index ...")
                     if chunk_idx is None:
                         # we just use the first archive's idx as starting point,
                         # to avoid growing the hash table from 0 size and also

@@ -315,7 +334,7 @@ class Cache:
                         chunk_idx = archive_chunk_idx
                     else:
                         chunk_idx.merge(archive_chunk_idx)
-            print('Done.')
+            logger.info('Done.')
             return chunk_idx

         def legacy_cleanup():

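`Cache.__format__` above inverts the usual roles: `format(obj, spec)` passes the "spec" to `obj.__format__`, and here the spec is itself a full template that gets filled with a namedtuple of summary fields, which lets `__str__` ship its whole layout as the format spec. A self-contained sketch of the trick, with a hypothetical `Index` class standing in for the chunk index:

    from collections import namedtuple

    Summary = namedtuple('Summary', ['unique', 'total'])

    class Index:
        def summarize(self):
            return 12, 345  # hypothetical chunk counts

        def __format__(self, format_spec):
            # the "format spec" is really a template filled from a namedtuple
            return format_spec.format(Summary(*self.summarize()))

    print(format(Index(), 'Chunk index: {0.unique:>20d} {0.total:>20d}'))
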
borg/helpers.py (+16 -10)

@@ -157,14 +157,20 @@ class Statistics:
             self.usize += csize

     def print_(self, label, cache):
-        total_size, total_csize, unique_size, unique_csize, total_unique_chunks, total_chunks = cache.chunks.summarize()
-        print()
-        print('                       Original size      Compressed size    Deduplicated size')
-        print('%-15s %20s %20s %20s' % (label, format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize)))
-        print('All archives:   %20s %20s %20s' % (format_file_size(total_size), format_file_size(total_csize), format_file_size(unique_csize)))
-        print()
-        print('                       Unique chunks         Total chunks')
-        print('Chunk index:    %20d %20d' % (total_unique_chunks, total_chunks))
+        buf = str(self) % label
+        buf += "\n"
+        buf += str(cache)
+        return buf
+
+    def __str__(self):
+        return format(self, """\
+                       Original size      Compressed size    Deduplicated size
+%-15s {0.osize:>20s} {0.csize:>20s} {0.usize:>20s}""")
+
+    def __format__(self, format_spec):
+        fields = ['osize', 'csize', 'usize']
+        FormattedStats = namedtuple('FormattedStats', fields)
+        return format_spec.format(FormattedStats(*map(format_file_size, [ getattr(self, x) for x in fields ])))

     def show_progress(self, item=None, final=False):
         if not final:

@@ -175,8 +181,8 @@ class Statistics:
                 format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize), path)
         else:
             msg = ' ' * 79
-        print(msg, end='\r')
-        sys.stdout.flush()
+        print(msg, file=sys.stderr, end='\r')
+        sys.stderr.flush()


 def get_keys_dir():

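`Statistics` now mixes two formatting stages: `__format__` fills the `{0.osize:>20s}` fields but leaves the literal `%-15s` untouched (`%` means nothing to `str.format`), and `print_` then fills that remaining placeholder with the label via the `%` operator. A three-line sketch of the staging:

    template = '%-15s {0:>20s}'          # %-15s survives stage 1
    filled = template.format('1.24 GB')  # stage 1: str.format fills the braces
    print(filled % 'This archive:')      # stage 2: %-interpolation fills the label
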
borg/key.py (+12 -9)

@@ -7,6 +7,9 @@ import hmac
 from hashlib import sha256

 from .helpers import IntegrityError, get_keys_dir, Error, have_cython
+from .logger import create_logger
+logger = create_logger()
+
 if have_cython():
     from .crypto import pbkdf2_sha256, get_random_bytes, AES, bytes_to_long, long_to_bytes, bytes_to_int, num_aes_blocks
     from .compress import Compressor, COMPR_BUFFER

@@ -89,7 +92,7 @@ class PlaintextKey(KeyBase):

     @classmethod
     def create(cls, repository, args):
-        print('Encryption NOT enabled.\nUse the "--encryption=repokey|keyfile|passphrase" to enable encryption.')
+        logger.info('Encryption NOT enabled.\nUse the "--encryption=repokey|keyfile|passphrase" to enable encryption.')
         return cls(repository)

     @classmethod

@@ -191,12 +194,12 @@ class Passphrase(str):
             if allow_empty or passphrase:
                 passphrase2 = cls.getpass('Enter same passphrase again: ')
                 if passphrase == passphrase2:
-                    print('Remember your passphrase. Your data will be inaccessible without it.')
+                    logger.info('Remember your passphrase. Your data will be inaccessible without it.')
                     return passphrase
                 else:
-                    print('Passphrases do not match')
+                    print('Passphrases do not match', file=sys.stderr)
             else:
-                print('Passphrase must not be blank')
+                print('Passphrase must not be blank', file=sys.stderr)

     def __repr__(self):
         return '<Passphrase "***hidden***">'

@@ -216,8 +219,8 @@ class PassphraseKey(AESKeyBase):
     @classmethod
     def create(cls, repository, args):
         key = cls(repository)
-        print('WARNING: "passphrase" mode is deprecated and will be removed in 1.0.')
-        print('If you want something similar (but with less issues), use "repokey" mode.')
+        logger.warning('WARNING: "passphrase" mode is deprecated and will be removed in 1.0.')
+        logger.warning('If you want something similar (but with less issues), use "repokey" mode.')
         passphrase = Passphrase.new(allow_empty=False)
         key.init(repository, passphrase)
         return key

@@ -325,7 +328,7 @@ class KeyfileKeyBase(AESKeyBase):
     def change_passphrase(self):
         passphrase = Passphrase.new(allow_empty=True)
         self.save(self.target, passphrase)
-        print('Key updated')
+        logger.info('Key updated')

     @classmethod
     def create(cls, repository, args):

@@ -336,8 +339,8 @@ class KeyfileKeyBase(AESKeyBase):
         key.init_ciphers()
         target = key.get_new_target(args)
         key.save(target, passphrase)
-        print('Key in "%s" created.' % target)
-        print('Keep this key safe. Your data will be inaccessible without it.')
+        logger.info('Key in "%s" created.' % target)
+        logger.info('Keep this key safe. Your data will be inaccessible without it.')
         return key

     def save(self, target, passphrase):

borg/logger.py (+91 -0)

@@ -0,0 +1,91 @@
+"""logging facilities
+
+The way to use this is as follows:
+
+* each module declares its own logger, using:
+
+    from .logger import create_logger
+    logger = create_logger()
+
+* then each module uses logger.info/warning/debug/etc according to the
+  level it believes is appropriate:
+
+    logger.debug('some intricate details you usually do not care about')
+    logger.info('verbose progress information')
+    logger.warning('some non-error condition that must always be reported')
+    logger.error('a fatal error')
+
+  ... and so on. see the `logging documentation
+  <https://docs.python.org/3/howto/logging.html#when-to-use-logging>`_
+  for more information
+
+* console interaction happens on stderr; that includes interactive
+  reporting functions like `help`, `info` and `list`
+
+* ...except ``input()`` is special, because we can't control the
+  stream it is using, unfortunately. we assume that it won't clutter
+  stdout, because interaction would be broken then anyways
+
+* advanced verbosity filters, based on what I described in
+  https://github.com/borgbackup/borg/pull/233#issuecomment-145100222
+  may eventually be implemented
+"""
+
+import inspect
+import logging
+import sys
+
+
+def setup_logging(args, stream=None):
+    """setup logging module according to the arguments provided
+
+    this sets up a stream handler logger on stderr (by default, if no
+    stream is provided) and verbosity levels.
+    """
+    logging.raiseExceptions = False
+    l = logging.getLogger('')
+    sh = logging.StreamHandler(stream)
+    # other formatters will probably want this, but let's remove
+    # clutter on stderr
+    # example:
+    # sh.setFormatter(logging.Formatter('%(name)s: %(message)s'))
+    l.addHandler(sh)
+    levels = {None: logging.WARNING,
+              0: logging.WARNING,
+              1: logging.INFO,
+              2: logging.DEBUG}
+    # default to WARNING, -v goes to INFO and -vv to DEBUG
+    l.setLevel(levels[args.verbose])
+    return sh,
+
+
+def find_parent_module():
+    """find the name of a the first module calling this module
+
+    if we cannot find it, we return the current module's name
+    (__name__) instead.
+    """
+    try:
+        frame = inspect.currentframe().f_back
+        module = inspect.getmodule(frame)
+        while module is None or module.__name__ == __name__:
+            frame = frame.f_back
+            module = inspect.getmodule(frame)
+        return module.__name__
+    except AttributeError:
+        # somehow we failed to find our module
+        # return the logger module name by default
+        return __name__
+
+
+def create_logger(name=None):
+    """create a Logger object with the proper path, which is returned by
+    find_parent_module() by default, or is provided via the commandline
+
+    this is really a shortcut for:
+
+        logger = logging.getLogger(__name__)
+
+    we use it to avoid errors and provide a more standard API.
+    """
+    return logging.getLogger(name or find_parent_module())

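A hypothetical end-to-end use of this module, assuming the package is importable as `borg`: call `setup_logging()` once at startup to wire a stderr handler onto the root logger, then let each module log through its own `create_logger()` logger, which names itself after the calling module:

    from argparse import Namespace

    from borg.logger import create_logger, setup_logging

    logger = create_logger()             # named after the calling module
    setup_logging(Namespace(verbose=1))  # as if -v had been given

    logger.info('verbose progress information')  # shown at -v (INFO)
    logger.debug('intricate details')            # suppressed at -v
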
borg/repository.py (+5 -2)

@@ -2,6 +2,9 @@ from configparser import RawConfigParser
 from binascii import hexlify
 from itertools import islice
 import errno
+import logging
+logger = logging.getLogger(__name__)
+
 import os
 import shutil
 import struct

@@ -279,7 +282,7 @@ class Repository:
         def report_error(msg):
             nonlocal error_found
             error_found = True
-            print(msg, file=sys.stderr)
+            logger.error(msg)

         assert not self._active_txn
         try:

@@ -551,7 +554,7 @@ class LoggedIO:
         with open(filename, 'rb') as fd:
             data = memoryview(fd.read())
         os.rename(filename, filename + '.beforerecover')
-        print('attempting to recover ' + filename, file=sys.stderr)
+        logger.info('attempting to recover ' + filename)
         with open(filename, 'wb') as fd:
             fd.write(MAGIC)
             while len(data) >= self.header_fmt.size:

borg/testsuite/logger.py (+44 -0)

@@ -0,0 +1,44 @@
+import logging
+from io import StringIO
+import sys
+
+from mock import Mock
+import pytest
+
+from ..logger import find_parent_module, create_logger, setup_logging
+logger = create_logger()
+
+
+@pytest.fixture()
+def io_logger():
+    io = StringIO()
+    args = Mock()
+    args.verbose = 2
+    assert args.verbose == 2
+    handler, = setup_logging(args, io)
+    handler.setFormatter(logging.Formatter('%(name)s: %(message)s'))
+    logger.setLevel(logging.DEBUG)
+    return io
+
+
+def test_setup_logging(io_logger):
+    logger.info('hello world')
+    assert io_logger.getvalue() == "borg.testsuite.logger: hello world\n"
+
+
+def test_multiple_loggers(io_logger):
+    logger = logging.getLogger(__name__)
+    logger.info('hello world 1')
+    assert io_logger.getvalue() == "borg.testsuite.logger: hello world 1\n"
+    logger = logging.getLogger('borg.testsuite.logger')
+    logger.info('hello world 2')
+    assert io_logger.getvalue() == "borg.testsuite.logger: hello world 1\nborg.testsuite.logger: hello world 2\n"
+    io_logger.truncate(0)
+    io_logger.seek(0)
+    logger = logging.getLogger('borg.testsuite.logger')
+    logger.info('hello world 2')
+    assert io_logger.getvalue() == "borg.testsuite.logger: hello world 2\n"
+
+
+def test_parent_module():
+    assert find_parent_module() == __name__

borg/testsuite/upgrader.py (+0 -2)

@@ -77,7 +77,6 @@ def test_convert_segments(tmpdir, attic_repo):
     """
     # check should fail because of magic number
     assert not repo_valid(tmpdir)
-    print("opening attic repository with borg and converting")
     repo = AtticRepositoryUpgrader(str(tmpdir), create=False)
     segments = [filename for i, filename in repo.io.segment_iterator()]
     repo.close()

@@ -156,7 +155,6 @@ def test_convert_all(tmpdir, attic_repo, attic_key_file):
     """
     # check should fail because of magic number
     assert not repo_valid(tmpdir)
-    print("opening attic repository with borg and converting")
     repo = AtticRepositoryUpgrader(str(tmpdir), create=False)
     repo.upgrade(dryrun=False)
     assert key_valid(attic_key_file.path)

borg/upgrader.py (+17 -13)

@@ -1,6 +1,9 @@
 from binascii import hexlify
+import logging
+logger = logging.getLogger(__name__)
 import os
 import shutil
+import sys
 import time

 from .helpers import get_keys_dir, get_cache_dir

@@ -23,14 +26,14 @@ class AtticRepositoryUpgrader(Repository):
         we nevertheless do the order in reverse, as we prefer to do
         the fast stuff first, to improve interactivity.
         """
-        print("reading segments from attic repository using borg")
-        # we need to open it to load the configuration and other fields
+        logger.info("opening attic repository with borg and converting")
+        # we need to open the repo to load configuration, keyfiles and segments
         self.open(self.path, exclusive=False)
         segments = [filename for i, filename in self.io.segment_iterator()]
         try:
             keyfile = self.find_attic_keyfile()
         except KeyfileNotFoundError:
-            print("no key file found for repository")
+            logger.warning("no key file found for repository")
         else:
             self.convert_keyfiles(keyfile, dryrun)
         self.close()

@@ -53,17 +56,18 @@ class AtticRepositoryUpgrader(Repository):

         luckily the magic string length didn't change so we can just
         replace the 8 first bytes of all regular files in there."""
-        print("converting %d segments..." % len(segments))
+        logger.info("converting %d segments..." % len(segments))
         i = 0
         for filename in segments:
             i += 1
             print("\rconverting segment %d/%d in place, %.2f%% done (%s)"
-                  % (i, len(segments), 100*float(i)/len(segments), filename), end='')
+                  % (i, len(segments), 100*float(i)/len(segments), filename),
+                  end='', file=sys.stderr)
             if dryrun:
                 time.sleep(0.001)
             else:
                 AtticRepositoryUpgrader.header_replace(filename, ATTIC_MAGIC, MAGIC)
-        print()
+        print(file=sys.stderr)

     @staticmethod
     def header_replace(filename, old_magic, new_magic):

@@ -107,12 +111,12 @@ class AtticRepositoryUpgrader(Repository):
         key file because magic string length changed, but that's not a
         problem because the keyfiles are small (compared to, say,
         all the segments)."""
-        print("converting keyfile %s" % keyfile)
+        logger.info("converting keyfile %s" % keyfile)
         with open(keyfile, 'r') as f:
             data = f.read()
         data = data.replace(AtticKeyfileKey.FILE_ID, KeyfileKey.FILE_ID, 1)
         keyfile = os.path.join(get_keys_dir(), os.path.basename(keyfile))
-        print("writing borg keyfile to %s" % keyfile)
+        logger.info("writing borg keyfile to %s" % keyfile)
         if not dryrun:
             with open(keyfile, 'w') as f:
                 f.write(data)

@@ -138,7 +142,7 @@ class AtticRepositoryUpgrader(Repository):
         caches = []
         transaction_id = self.get_index_transaction_id()
         if transaction_id is None:
-            print('no index file found for repository %s' % self.path)
+            logger.warning('no index file found for repository %s' % self.path)
         else:
             caches += [os.path.join(self.path, 'index.%d' % transaction_id).encode('utf-8')]

@@ -168,14 +172,14 @@ class AtticRepositoryUpgrader(Repository):
             if os.path.exists(attic_file):
                 borg_file = os.path.join(borg_cache_dir, path)
                 if os.path.exists(borg_file):
-                    print("borg cache file already exists in %s, skipping conversion of %s" % (borg_file, attic_file))
+                    logger.warning("borg cache file already exists in %s, skipping conversion of %s" % (borg_file, attic_file))
                 else:
-                    print("copying attic cache file from %s to %s" % (attic_file, borg_file))
+                    logger.info("copying attic cache file from %s to %s" % (attic_file, borg_file))
                     if not dryrun:
                         shutil.copyfile(attic_file, borg_file)
                     return borg_file
             else:
-                print("no %s cache file found in %s" % (path, attic_file))
+                logger.warning("no %s cache file found in %s" % (path, attic_file))
             return None

         # XXX: untested, because generating cache files is a PITA, see

@@ -192,7 +196,7 @@ class AtticRepositoryUpgrader(Repository):
             for cache in ['chunks']:
                 copied = copy_cache_file(cache)
                 if copied:
-                    print("converting cache %s" % cache)
+                    logger.info("converting cache %s" % cache)
                     if not dryrun:
                         AtticRepositoryUpgrader.header_replace(cache, b'ATTICIDX', b'BORG_IDX')
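
`header_replace` itself is outside this diff, but the docstrings pin down its contract: both magics are the same length, so only the leading bytes of each file need rewriting in place. A sketch consistent with that description (not the committed implementation):

    def header_replace(filename, old_magic, new_magic):
        assert len(old_magic) == len(new_magic)
        with open(filename, 'r+b') as f:
            assert f.read(len(old_magic)) == old_magic  # sanity-check the header
            f.seek(0)
            f.write(new_magic)  # overwrite the magic in place, rest untouched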