Thomas Waldmann 9 months ago
parent
commit
dcde48490e

+ 0 - 1
src/borg/archiver/rinfo_cmd.py

@@ -64,7 +64,6 @@ class RInfoMixIn:
             output += "Security dir: {security_dir}\n".format(**info)
             output += "Security dir: {security_dir}\n".format(**info)
 
 
             print(output)
             print(output)
-            print(str(cache))
 
 
     def build_parser_rinfo(self, subparsers, common_parser, mid_common_parser):
     def build_parser_rinfo(self, subparsers, common_parser, mid_common_parser):
         from ._common import process_epilog
         from ._common import process_epilog

+ 2 - 37
src/borg/cache.py

@@ -381,32 +381,6 @@ class Cache:
         return adhoc() if prefer_adhoc_cache else adhocwithfiles()


-class CacheStatsMixin:
-    str_format = """\
-Original size: {0.total_size}
-Deduplicated size: {0.unique_size}
-Unique chunks: {0.total_unique_chunks}
-Total chunks: {0.total_chunks}
-"""
-
-    def __init__(self, iec=False):
-        self.iec = iec
-
-    def __str__(self):
-        return self.str_format.format(self.format_tuple())
-
-    Summary = namedtuple("Summary", ["total_size", "unique_size", "total_unique_chunks", "total_chunks"])
-
-    def stats(self):
-        return self.Summary(0, 0, 0, 0)._asdict()  # dummy to not cause crash with current code
-
-    def format_tuple(self):
-        stats = self.stats()
-        for field in ["total_size", "unique_size"]:
-            stats[field] = format_file_size(stats[field], iec=self.iec)
-        return self.Summary(**stats)
-
-
 class FilesCacheMixin:
     """
     Massively accelerate processing of unchanged files by caching their chunks list.
@@ -688,7 +662,7 @@ class ChunksMixin:
         return chunks


-class AdHocWithFilesCache(CacheStatsMixin, FilesCacheMixin, ChunksMixin):
+class AdHocWithFilesCache(FilesCacheMixin, ChunksMixin):
     """
     Like AdHocCache, but with a files cache.
     """
@@ -708,7 +682,6 @@ class AdHocWithFilesCache(CacheStatsMixin, FilesCacheMixin, ChunksMixin):
         :param lock_wait: timeout for lock acquisition (int [s] or None [wait forever])
         :param cache_mode: what shall be compared in the file stat infos vs. cached stat infos comparison
         """
-        CacheStatsMixin.__init__(self, iec=iec)
         FilesCacheMixin.__init__(self, cache_mode)
         assert isinstance(manifest, Manifest)
         self.manifest = manifest
@@ -850,7 +823,7 @@ class AdHocWithFilesCache(CacheStatsMixin, FilesCacheMixin, ChunksMixin):
         self.cache_config.mandatory_features.update(repo_features & my_features)


-class AdHocCache(CacheStatsMixin, ChunksMixin):
+class AdHocCache(ChunksMixin):
     """
     Ad-hoc, non-persistent cache.

@@ -859,15 +832,7 @@ class AdHocCache(CacheStatsMixin, ChunksMixin):
     Chunks that were not added during the current AdHocCache lifetime won't have correct size set
     (0 bytes) and will have an infinite reference count (MAX_VALUE).
     """
-
-    str_format = """\
-All archives:                unknown              unknown              unknown
-
-                       Unique chunks         Total chunks
-Chunk index:    {0.total_unique_chunks:20d}             unknown"""
-
     def __init__(self, manifest, warn_if_unencrypted=True, lock_wait=None, iec=False):
-        CacheStatsMixin.__init__(self, iec=iec)
         assert isinstance(manifest, Manifest)
         self.manifest = manifest
         self.repository = manifest.repository
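
Note: the deleted CacheStatsMixin only ever returned a dummy all-zero Summary (see the removed stats() above), so the report it rendered carried no real information. For context, a minimal standalone sketch (not borg code) of the formatting it provided, with format_file_size() left out to keep it self-contained:

# Standalone sketch of the removed stats report; field names come from the
# deleted str_format, values here are plain integers instead of formatted sizes.
from collections import namedtuple

Summary = namedtuple("Summary", ["total_size", "unique_size", "total_unique_chunks", "total_chunks"])

STR_FORMAT = """\
Original size: {0.total_size}
Deduplicated size: {0.unique_size}
Unique chunks: {0.total_unique_chunks}
Total chunks: {0.total_chunks}
"""

def format_stats(stats):
    return STR_FORMAT.format(stats)

# The removed stats() always returned zeros, so the report was always all-zero.
print(format_stats(Summary(0, 0, 0, 0)))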

+ 2 - 2
src/borg/helpers/parseformat.py

@@ -1193,9 +1193,9 @@ class BorgJsonEncoder(json.JSONEncoder):
         if isinstance(o, Archive):
             return o.info()
         if isinstance(o, (AdHocWithFilesCache, )):
-            return {"path": o.path, "stats": o.stats()}
+            return {"path": o.path}
         if isinstance(o, AdHocCache):
-            return {"stats": o.stats()}
+            return {}
         if callable(getattr(o, "to_json", None)):
             return o.to_json()
         return super().default(o)
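
Note: after this change BorgJsonEncoder serializes an AdHocWithFilesCache as just its path and an AdHocCache as an empty JSON object. A rough, hedged sketch of that behaviour using a stand-in class (FakeCache and its path value are made up for illustration, not borg objects):

import json

class FakeCache:
    path = "/path/to/cache"  # hypothetical example value

class SketchEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, FakeCache):
            return {"path": o.path}  # "stats" is no longer emitted
        return super().default(o)

# prints a JSON object like {"cache": {"path": "/path/to/cache"}}
print(json.dumps({"cache": FakeCache()}, cls=SketchEncoder))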

+ 1 - 7
src/borg/testsuite/archiver/rinfo_cmd.py

@@ -1,5 +1,4 @@
 import json
-from random import randbytes

 from ...constants import *  # NOQA
 from . import checkts, cmd, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
@@ -13,7 +12,7 @@ def test_info(archivers, request):
     cmd(archiver, "rcreate", RK_ENCRYPTION)
     cmd(archiver, "rcreate", RK_ENCRYPTION)
     cmd(archiver, "create", "test", "input")
     cmd(archiver, "create", "test", "input")
     info_repo = cmd(archiver, "rinfo")
     info_repo = cmd(archiver, "rinfo")
-    assert "Original size:" in info_repo
+    assert "Repository ID:" in info_repo
 
 
 
 
 def test_info_json(archivers, request):
 def test_info_json(archivers, request):
@@ -30,8 +29,3 @@ def test_info_json(archivers, request):
     checkts(repository["last_modified"])
     assert info_repo["encryption"]["mode"] == RK_ENCRYPTION[13:]
     assert "keyfile" not in info_repo["encryption"]
-
-    cache = info_repo["cache"]
-    stats = cache["stats"]
-    assert all(isinstance(o, int) for o in stats.values())
-    assert all(key in stats for key in ("total_chunks", "total_size", "total_unique_chunks", "unique_size"))
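
Note: since cache stats no longer appear in the rinfo JSON, scripts should not rely on a "stats" entry. A hedged sketch of a tolerant consumer (it assumes an already configured repository and only reads keys exercised by the test above):

# Sketch: read `borg rinfo --json` without assuming cache stats are present.
import json
import subprocess

out = subprocess.check_output(["borg", "rinfo", "--json"], text=True)
info = json.loads(out)

print("last modified:", info["repository"]["last_modified"])
print("encryption mode:", info["encryption"]["mode"])

stats = info.get("cache", {}).get("stats")  # removed by this commit; may be absent
if stats is None:
    print("no cache stats in rinfo output (expected after this change)")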