Merge pull request #1447 from enkore/f/debug-topic

Debugging topics and some other minor stuff
TW committed 8 years ago
parent commit ff5d9a3f12
4 changed files with 25 additions and 9 deletions
  1. src/borg/archive.py    +3 -2
  2. src/borg/archiver.py   +13 -4
  3. src/borg/helpers.py    +4 -3
  4. src/borg/logger.py     +5 -0

+ 3 - 2
src/borg/archive.py

@@ -252,6 +252,7 @@ class Archive:
         self.consider_part_files = consider_part_files
         self.pipeline = DownloadPipeline(self.repository, self.key)
         if create:
+            self.file_compression_logger = create_logger('borg.debug.file-compression')
             self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
             self.chunker = Chunker(self.key.chunk_seed, *chunker_params)
             self.compression_decider1 = CompressionDecider1(compression or CompressionSpec('none'),
@@ -796,7 +797,7 @@ Number of files: {0.stats.nfiles}'''.format(
             # there should be no information in the cache about special files processed in
             # read-special mode, but we better play safe as this was wrong in the past:
             path_hash = ids = None
-        first_run = not cache.files
+        first_run = not cache.files and cache.do_files
         if first_run:
             logger.debug('Processing files ...')
         chunks = None
@@ -820,7 +821,7 @@ Number of files: {0.stats.nfiles}'''.format(
             item.chunks = chunks
         else:
             compress = self.compression_decider1.decide(path)
-            logger.debug('%s -> compression %s', path, compress['name'])
+            self.file_compression_logger.debug('%s -> compression %s', path, compress['name'])
             with backup_io():
                 fh = Archive._open_rb(path)
             with os.fdopen(fh, 'rb') as fd:
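
The archive.py change moves the per-file compression decision onto a dedicated
topic logger ('borg.debug.file-compression') instead of the general debug
logger, so it only shows up when that topic is explicitly enabled. A minimal
sketch of the same pattern using stdlib logging only (the 'myapp.*' names are
illustrative, not borg's actual layout):

    import logging

    # Dedicated topic logger: records are emitted only once this topic (or a
    # parent logger) has been explicitly set to DEBUG.
    file_compression_logger = logging.getLogger('myapp.debug.file-compression')

    def log_compression_decision(path, compressor_name):
        # Quiet by default; becomes visible when the topic is enabled.
        file_compression_logger.debug('%s -> compression %s', path, compressor_name)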

+ 13 - 4
src/borg/archiver.py

@@ -1270,6 +1270,10 @@ class Archiver:
         common_group.add_argument('--debug', dest='log_level',
                                   action='store_const', const='debug', default='warning',
                                   help='enable debug output, work on log level DEBUG')
+        common_group.add_argument('--debug-topic', dest='debug_topics',
+                                  action='append', metavar='TOPIC', default=[],
+                                  help='enable TOPIC debugging (can be specified multiple times). '
+                                       'The logger path is borg.debug.<TOPIC> if TOPIC is not fully qualified.')
         common_group.add_argument('--lock-wait', dest='lock_wait', type=int, metavar='N', default=1,
                                   help='wait for the lock, but max. N seconds (default: %(default)d).')
         common_group.add_argument('--show-version', dest='show_version', action='store_true', default=False,
@@ -1409,10 +1413,6 @@ class Archiver:
         tamper-resistant as well, unless the attacker has access to the keys.
 
         It is also very slow.
-
-        --verify-data only verifies data used by the archives specified with --last,
-        --prefix or an explicitly named archive. If none of these are passed,
-        all data in the repository is verified.
         """)
         subparser = subparsers.add_parser('check', parents=[common_parser], add_help=False,
                                           description=self.do_check.__doc__,
@@ -2263,11 +2263,20 @@ class Archiver:
             if args.get(option, False):
                 logging.getLogger(logger_name).setLevel('INFO')
 
+    def _setup_topic_debugging(self, args):
+        """Turn on DEBUG level logging for specified --debug-topics."""
+        for topic in args.debug_topics:
+            if '.' not in topic:
+                topic = 'borg.debug.' + topic
+            logger.debug('Enabling debug topic %s', topic)
+            logging.getLogger(topic).setLevel('DEBUG')
+
     def run(self, args):
         os.umask(args.umask)  # early, before opening files
         self.lock_wait = args.lock_wait
         setup_logging(level=args.log_level, is_serve=args.func == self.do_serve)  # do not use loggers before this!
         self._setup_implied_logging(vars(args))
+        self._setup_topic_debugging(args)
         if args.show_version:
             logging.getLogger('borg.output.show-version').info('borgbackup version %s' % __version__)
         self.prerun_checks(logger)
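
The new --debug-topic option collects topic names, and _setup_topic_debugging
qualifies bare names under borg.debug. before raising that logger to DEBUG, so
for example "borg create --debug-topic file-compression ..." enables the
borg.debug.file-compression logger used above. A standalone sketch of the same
qualification logic (the function name and prefix parameter are illustrative):

    import logging

    def setup_topic_debugging(debug_topics, prefix='borg.debug.'):
        # Bare names are qualified under the prefix; names that already
        # contain a dot are treated as fully qualified logger paths.
        for topic in debug_topics:
            if '.' not in topic:
                topic = prefix + topic
            logging.getLogger(topic).setLevel('DEBUG')

    # roughly what "--debug-topic file-compression" ends up doing:
    setup_topic_debugging(['file-compression'])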

+ 4 - 3
src/borg/helpers.py

@@ -598,8 +598,7 @@ def format_time(t):
 def format_timedelta(td):
     """Format timedelta in a human friendly format
     """
-    # Since td.total_seconds() requires python 2.7
-    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
+    ts = td.total_seconds()
     s = ts % 60
     m = int(ts / 60) % 60
     h = int(ts / 3600) % 24
@@ -1528,6 +1527,8 @@ class CompressionDecider1:
 
 
 class CompressionDecider2:
+    logger = create_logger('borg.debug.file-compression')
+
     def __init__(self, compression):
         self.compression = compression
 
@@ -1557,7 +1558,7 @@ class CompressionDecider2:
             # that marks such data as uncompressible via compression-type metadata.
             compr_spec = CompressionSpec('none')
         compr_args.update(compr_spec)
-        logger.debug("len(data) == %d, len(lz4(data)) == %d, choosing %s", data_len, cdata_len, compr_spec)
+        self.logger.debug("len(data) == %d, len(lz4(data)) == %d, choosing %s", data_len, cdata_len, compr_spec)
         return compr_args, Chunk(data, **meta)
 
 
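The helpers.py cleanup drops the pre-2.7 fallback now that
timedelta.total_seconds() is always available, and gives CompressionDecider2 a
class-level logger on the same borg.debug.file-compression topic as the
decision logging in archive.py. A quick check that the old and new timedelta
computations agree (values chosen for illustration):

    from datetime import timedelta

    td = timedelta(days=1, seconds=3661, microseconds=500000)

    # old manual computation, kept for Python < 2.7 compatibility:
    old = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)

    # new, equivalent stdlib call:
    new = td.total_seconds()

    assert old == new == 90061.5   # 86400 + 3661 + 0.5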

+ 5 - 0
src/borg/logger.py

@@ -149,8 +149,13 @@ def create_logger(name=None):
                 if not configured:
                     raise Exception("tried to call a logger before setup_logging() was called")
                 self.__real_logger = logging.getLogger(self.__name)
+                if self.__name.startswith('borg.debug.') and self.__real_logger.level == logging.NOTSET:
+                    self.__real_logger.setLevel('WARNING')
             return self.__real_logger
 
+        def getChild(self, suffix):
+            return LazyLogger(self.__name + '.' + suffix)
+
         def setLevel(self, *args, **kw):
             return self.__logger.setLevel(*args, **kw)
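
The logger.py change pins unconfigured borg.debug.* loggers to WARNING the
first time they are realized, so a plain --debug run does not also emit every
topic, and getChild returns another LazyLogger so sub-topics stay lazy. The
resulting behaviour, demonstrated with stdlib logging only (a hypothetical
standalone demo, not borg's LazyLogger):

    import logging

    logging.basicConfig(level=logging.DEBUG)   # roughly what --debug turns on

    topic = logging.getLogger('borg.debug.file-compression')
    if topic.level == logging.NOTSET:
        # Without an explicit level the topic would inherit DEBUG from its
        # parents; pinning it to WARNING keeps topic output opt-in.
        topic.setLevel(logging.WARNING)

    topic.debug('hidden: topic not enabled')     # suppressed
    topic.setLevel(logging.DEBUG)                # what --debug-topic does
    topic.debug('visible: topic enabled')        # emitted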