
Merge pull request #5780 from ThomasWaldmann/show-early

print preliminary file status early, fixes #5417
TW authored 4 years ago · commit 99065a4034
2 changed files with 15 additions and 9 deletions:
  1. src/borg/archive.py   (+12, -3)
  2. src/borg/archiver.py  (+3, -6)
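
The change follows one pattern throughout: each processor prints a preliminary status line as soon as the status is known (before the potentially slow chunking work), then sets status to None so the caller's later print becomes a no-op instead of a duplicate. Below is a minimal sketch of that flow; the names (Processor, the known flag) are simplified stand-ins, not borg's real classes.

def print_file_status(status, path):
    # a status of None means the preliminary status was already printed early
    if status is not None:
        print(status, path)

class Processor:
    def __init__(self, file_status_printer=None):
        # fall back to a no-op printer when none is injected
        self.print_file_status = file_status_printer or (lambda *args: None)

    def process_file(self, path, known):
        status = 'M' if known else 'A'        # modified vs. added (simplified)
        self.print_file_status(status, path)  # print early, before the slow work
        status = None                         # already printed: suppress the duplicate
        # ... chunking / hashing of the file would happen here ...
        return status

proc = Processor(file_status_printer=print_file_status)
print_file_status(proc.process_file('/etc/hosts', known=False), '/etc/hosts')

Running this prints "A /etc/hosts" exactly once: early from inside process_file, while the caller's final call receives None and stays silent.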

src/borg/archive.py  (+12, -3)

@@ -1222,13 +1222,14 @@ class FilesystemObjectProcessors:
     def __init__(self, *, metadata_collector, cache, key,
                  add_item, process_file_chunks,
                  chunker_params, show_progress, sparse,
-                 log_json, iec):
+                 log_json, iec, file_status_printer=None):
         self.metadata_collector = metadata_collector
         self.cache = cache
         self.key = key
         self.add_item = add_item
         self.process_file_chunks = process_file_chunks
         self.show_progress = show_progress
+        self.print_file_status = file_status_printer or (lambda *args: None)
 
         self.hard_links = {}
         self.stats = Statistics(output_json=log_json, iec=iec)  # threading: done by cache (including progress)
@@ -1301,6 +1302,9 @@ class FilesystemObjectProcessors:
             return status
 
     def process_pipe(self, *, path, cache, fd, mode, user, group):
+        status = 'i'  # stdin (or other pipe)
+        self.print_file_status(status, path)
+        status = None  # we already printed the status
         uid = user2uid(user)
         if uid is None:
             raise Error("no such user: %s" % user)
@@ -1319,7 +1323,7 @@ class FilesystemObjectProcessors:
         item.get_size(memorize=True)
         self.stats.nfiles += 1
         self.add_item(item, stats=self.stats)
-        return 'i'  # stdin
+        return status
 
     def process_file(self, *, path, parent_fd, name, st, cache, flags=flags_normal):
         with self.create_helper(path, st, None) as (item, status, hardlinked, hardlink_master):  # no status yet
@@ -1356,6 +1360,8 @@ class FilesystemObjectProcessors:
                             status = 'U'  # regular file, unchanged
                     else:
                         status = 'M' if known else 'A'  # regular file, modified or added
+                    self.print_file_status(status, path)
+                    status = None  # we already printed the status
                     item.hardlink_master = hardlinked
                     # Only chunkify the file if needed
                     if chunks is not None:
@@ -2018,11 +2024,14 @@ class ArchiveRecreater:
             target.stats.show_progress(final=True)
 
     def process_item(self, archive, target, item):
+        status = file_status(item.mode)
         if 'chunks' in item:
+            self.print_file_status(status, item.path)
+            status = None
             self.process_chunks(archive, target, item)
             target.stats.nfiles += 1
         target.add_item(item, stats=target.stats)
-        self.print_file_status(file_status(item.mode), item.path)
+        self.print_file_status(status, item.path)
 
     def process_chunks(self, archive, target, item):
         if not self.recompress and not target.recreate_rechunkify:
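
Both process_pipe and ArchiveRecreater.process_item use the same early-print-then-None dance, and the constructor's `file_status_printer or (lambda *args: None)` default keeps callers that never inject a printer working without None checks. A small sketch of that null-object idiom, with hypothetical names:

class Worker:
    def __init__(self, on_status=None):
        # hypothetical names; the idiom: substitute a no-op callable so call
        # sites never need an "is not None" check
        self.on_status = on_status or (lambda *args: None)

    def run(self):
        self.on_status('A', '/some/path')  # safe even when nothing was injected

Worker().run()                 # silent: the no-op lambda swallows the call
Worker(on_status=print).run()  # prints: A /some/path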

src/borg/archiver.py  (+3, -6)

@@ -235,7 +235,8 @@ class Archiver:
         logger.warning(msg)
 
     def print_file_status(self, status, path):
-        if self.output_list and (self.output_filter is None or status in self.output_filter):
+        # if we get called with status == None, the final file status was already printed
+        if self.output_list and status is not None and (self.output_filter is None or status in self.output_filter):
             if self.log_json:
                 print(json.dumps({
                     'type': 'file_status',
@@ -562,8 +563,6 @@ class Archiver:
                         status = 'E'
                     if status == 'C':
                         self.print_warning('%s: file changed while we backed it up', path)
-                    if status is None:
-                        status = '?'
                     self.print_file_status(status, path)
                 if args.paths_from_command:
                     rc = proc.wait()
@@ -664,7 +663,7 @@ class Archiver:
                 fso = FilesystemObjectProcessors(metadata_collector=metadata_collector, cache=cache, key=key,
                     process_file_chunks=cp.process_file_chunks, add_item=archive.add_item,
                     chunker_params=args.chunker_params, show_progress=args.progress, sparse=args.sparse,
-                    log_json=args.log_json, iec=args.iec)
+                    log_json=args.log_json, iec=args.iec, file_status_printer=self.print_file_status)
                 create_inner(archive, cache, fso)
         else:
             create_inner(None, None, None)
@@ -820,8 +819,6 @@ class Archiver:
             status = 'E'
         if status == 'C':
             self.print_warning('%s: file changed while we backed it up', path)
-        if status is None:
-            status = '?'  # need to add a status code somewhere
         if not recurse_excluded_dir:
             self.print_file_status(status, path)
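
On the archiver side, print_file_status now treats a None status as "already printed early" and stays silent, which is why the old `status = '?'` fallbacks can be dropped. A condensed, standalone sketch of the new guard follows; the JSON field names other than 'type' and the plain-text output format are assumptions here, not copied from borg.

import json

class Archiver:
    def __init__(self, output_list=True, output_filter=None, log_json=False):
        self.output_list = output_list      # --list was given
        self.output_filter = output_filter  # e.g. 'AME' to restrict shown statuses
        self.log_json = log_json

    def print_file_status(self, status, path):
        # status == None: the (preliminary) status was already printed earlier
        if self.output_list and status is not None and \
                (self.output_filter is None or status in self.output_filter):
            if self.log_json:
                # field names besides 'type' are illustrative assumptions
                print(json.dumps({'type': 'file_status', 'status': status, 'path': path}))
            else:
                print(status, path)

a = Archiver()
a.print_file_status('A', '/etc/hosts')   # printed
a.print_file_status(None, '/etc/hosts')  # suppressed: already printed early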