瀏覽代碼

info: --json for archives

Marian Beermann 8 年之前
父節點
當前提交
6180f5055c
共有 2 個文件被更改,包括 53 次插入和 24 次刪除
  1. 24 6
      src/borg/archive.py
  2. 29 18
      src/borg/archiver.py

+ 24 - 6
src/borg/archive.py

@@ -290,7 +290,8 @@ class Archive:
         self.end = end
         self.consider_part_files = consider_part_files
         self.pipeline = DownloadPipeline(self.repository, self.key)
-        if create:
+        self.create = create
+        if self.create:
             self.file_compression_logger = create_logger('borg.debug.file-compression')
             self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
             self.chunker = Chunker(self.key.chunk_seed, *chunker_params)
@@ -352,17 +353,34 @@ class Archive:
         return format_timedelta(self.ts_end - self.ts)
 
     def info(self):
-        return {
+        if self.create:
+            stats = self.stats
+            start = self.start.replace(tzinfo=timezone.utc)
+            end = self.end.replace(tzinfo=timezone.utc)
+        else:
+            stats = self.calc_stats(self.cache)
+            start = self.ts
+            end = self.ts_end
+        info = {
             'name': self.name,
             'id': self.fpr,
-            'start': format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))),
-            'end': format_time(to_localtime(self.end.replace(tzinfo=timezone.utc))),
-            'duration': (self.end - self.start).total_seconds(),
-            'nfiles': self.stats.nfiles,
+            'start': format_time(to_localtime(start)),
+            'end': format_time(to_localtime(end)),
+            'duration': (end - start).total_seconds(),
+            'stats': stats.as_dict(),
             'limits': {
                 'max_archive_size': self.cache.chunks[self.id].csize / MAX_DATA_SIZE,
             },
         }
+        if self.create:
+            info['command_line'] = sys.argv
+        else:
+            info.update({
+                'command_line': self.metadata.cmdline,
+                'hostname': self.metadata.hostname,
+                'username': self.metadata.username,
+            })
+        return info
 
     def __str__(self):
         return '''\

+ 29 - 18
src/borg/archiver.py

@@ -390,7 +390,6 @@ class Archiver:
                         print_as_json({
                             'repository': repository,
                             'cache': cache,
-                            'stats': archive.stats.as_dict(),
                             'archive': archive,
                         })
                     else:
@@ -1002,29 +1001,41 @@ class Archiver:
             if not archive_names:
                 return self.exit_code
 
+        output_data = []
+
         for i, archive_name in enumerate(archive_names, 1):
             archive = Archive(repository, key, manifest, archive_name, cache=cache,
                               consider_part_files=args.consider_part_files)
-            stats = archive.calc_stats(cache)
-            print('Archive name: %s' % archive.name)
-            print('Archive fingerprint: %s' % archive.fpr)
-            print('Comment: %s' % archive.metadata.get('comment', ''))
-            print('Hostname: %s' % archive.metadata.hostname)
-            print('Username: %s' % archive.metadata.username)
-            print('Time (start): %s' % format_time(to_localtime(archive.ts)))
-            print('Time (end):   %s' % format_time(to_localtime(archive.ts_end)))
-            print('Duration: %s' % archive.duration_from_meta)
-            print('Number of files: %d' % stats.nfiles)
-            print('Command line: %s' % format_cmdline(archive.metadata.cmdline))
-            print('Utilization of max. archive size: %d%%' % (100 * cache.chunks[archive.id].csize / MAX_DATA_SIZE))
-            print(DASHES)
-            print(STATS_HEADER)
-            print(str(stats))
-            print(str(cache))
+            if args.json:
+                output_data.append(archive.info())
+            else:
+                stats = archive.calc_stats(cache)
+                print('Archive name: %s' % archive.name)
+                print('Archive fingerprint: %s' % archive.fpr)
+                print('Comment: %s' % archive.metadata.get('comment', ''))
+                print('Hostname: %s' % archive.metadata.hostname)
+                print('Username: %s' % archive.metadata.username)
+                print('Time (start): %s' % format_time(to_localtime(archive.ts)))
+                print('Time (end):   %s' % format_time(to_localtime(archive.ts_end)))
+                print('Duration: %s' % archive.duration_from_meta)
+                print('Number of files: %d' % stats.nfiles)
+                print('Command line: %s' % format_cmdline(archive.metadata.cmdline))
+                print('Utilization of max. archive size: %d%%' % (100 * cache.chunks[archive.id].csize / MAX_DATA_SIZE))
+                print(DASHES)
+                print(STATS_HEADER)
+                print(str(stats))
+                print(str(cache))
             if self.exit_code:
                 break
-            if len(archive_names) - i:
+            if not args.json and len(archive_names) - i:
                 print()
+
+        if args.json:
+            print_as_json({
+                'repository': repository,
+                'cache': cache,
+                'archives': output_data,
+            })
         return self.exit_code
 
     def _info_repository(self, args, repository, key, cache):