
output more progress information

Without this, there would be a solid 20 seconds here without any
output on the console, regardless of the verbosity level. This adds
incremental messages telling the user that borg is not stalled
(or waiting for a lock, for that matter).

The "processing files" message is a little clunky, as we somewhat
abuse the cache to figure out whether we are just starting, but it
helps if there are problems reading the actual files: it tells us that
initialization is essentially complete and that we are going ahead
with reading all the files.
Antoine Beaupré, 9 years ago
Parent
Commit b120e5f119
2 files changed, 6 insertions(+), 0 deletions(-)
  1. borg/archive.py  +3 -0
  2. borg/cache.py    +3 -0

borg/archive.py  +3 -0

@@ -503,7 +503,10 @@ Number of files: {0.stats.nfiles}
             else:
                 self.hard_links[st.st_ino, st.st_dev] = safe_path
         path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
+        first_run = not cache.files
         ids = cache.file_known_and_unchanged(path_hash, st)
+        if first_run:
+            logger.info('processing files')
         chunks = None
         if ids is not None:
             # Make sure all ids are available
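
The first_run check works because the files cache is presumably loaded lazily: cache.files is still unset until the first file_known_and_unchanged() lookup populates it, so sampling it before that call flags the first file of the run. A minimal sketch of the pattern, using hypothetical stand-in classes rather than borg's real ones:

    # Minimal sketch (hypothetical stand-ins, not borg's real classes) of the
    # "log once on first use" pattern added above. As the commit message admits,
    # the check is approximate: it stays true as long as the files cache is
    # still unloaded or empty.
    import logging

    logger = logging.getLogger(__name__)

    class LazyFilesCache:
        def __init__(self):
            self.files = None                    # loaded on first lookup

        def file_known_and_unchanged(self, path_hash, st):
            if self.files is None:
                # stands in for _read_files(), which normally fills the dict
                # from the on-disk files cache
                self.files = {}
            return self.files.get(path_hash)     # None -> file must be re-read

    def process_file(cache, path_hash, st):
        first_run = not cache.files              # still unloaded (or empty)?
        ids = cache.file_known_and_unchanged(path_hash, st)
        if first_run:
            logger.info('processing files')
        return ids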

borg/cache.py  +3 -0

@@ -48,6 +48,7 @@ class Cache:
         self.manifest = manifest
         self.path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
         self.do_files = do_files
+        logger.info('initializing cache')
         # Warn user before sending data to a never seen before unencrypted repository
         if not os.path.exists(self.path):
             if warn_if_unencrypted and isinstance(key, PlaintextKey):
@@ -69,6 +70,7 @@ class Cache:
             # Make sure an encrypted repository has not been swapped for an unencrypted repository
             if self.key_type is not None and self.key_type != str(key.TYPE):
                 raise self.EncryptionMethodMismatch()
+            logger.info('synchronizing cache')
             self.sync()
             self.commit()
 
@@ -163,6 +165,7 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}""")
     def _read_files(self):
         self.files = {}
         self._newest_mtime = 0
+        logger.info('reading files cache')
         with open(os.path.join(self.path, 'files'), 'rb') as fd:
             u = msgpack.Unpacker(use_list=True)
             while True:
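
These messages all go through Python's standard logging module at INFO level, so they only show up when a console handler is configured at that level (borg wires this up through its own verbosity handling). A minimal, hypothetical way to make them visible, with the rough progression a user would then see; the logger name is assumed to follow the module path:

    # Rough order of the new messages during a backup run (the exact set
    # depends on which code paths are taken):
    #
    #   initializing cache
    #   synchronizing cache
    #   reading files cache
    #   processing files
    import logging

    logging.basicConfig(level=logging.INFO, format='%(message)s')
    logging.getLogger('borg.cache').info('initializing cache')  # demo message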