
use "part file", "part", etc. consistently

use .borg_part_N as filename to avoid collisions
Thomas Waldmann 9 years ago
parent commit 04ad1d1b0b

+ 3 - 3
docs/faq.rst

@@ -248,9 +248,9 @@ also care for deleting unneeded checkpoints.
 
 Note: the checkpointing mechanism creates hidden, partial files in an archive,
 so that checkpoints even work while a big file is being processed.
-They are named ``<filename>.checkpoint_<N>`` and all operations usually ignore
+They are named ``<filename>.borg_part_<N>`` and all operations usually ignore
 these files, but you can make them considered by giving the option
-``--consider-checkpoint-files``. You usually only need that option if you are
+``--consider-part-files``. You usually only need that option if you are
 really desperate (e.g. if you have no completed backup of that file and you'd
 rather get a partial file extracted than nothing). You do **not** want to give
 that option under any normal circumstances.
@@ -264,7 +264,7 @@ How can I restore huge file(s) over an unstable connection?
 ----------------------------------------------------------
 
 If you can not manage to extract the whole big file in one go, you can extract
-all the checkpoint files (see above) and manually concatenate them together.
+all the part files (see above) and manually concatenate them together.
 
 If it crashes with a UnicodeError, what can I do?
 -------------------------------------------------
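
To make the manual concatenation described above concrete, here is a minimal sketch (not part of borg itself) that stitches extracted part files back together. It assumes the files were extracted with ``--consider-part-files`` and follow the ``<filename>.borg_part_<N>`` naming shown above; the target path ``bigfile`` and the helper name are placeholders.

    import os
    import re
    import shutil

    def reassemble(target):
        # target is the path of the file to rebuild, e.g. 'bigfile' (hypothetical)
        pattern = re.compile(re.escape(os.path.basename(target)) + r'\.borg_part_(\d+)$')
        directory = os.path.dirname(target) or '.'
        parts = []
        for name in os.listdir(directory):
            m = pattern.match(name)
            if m:
                parts.append((int(m.group(1)), os.path.join(directory, name)))
        with open(target, 'wb') as out:
            # sort numerically, not lexically, so part 10 comes after part 9
            for _, path in sorted(parts):
                with open(path, 'rb') as part:
                    shutil.copyfileobj(part, out)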

+ 7 - 8
src/borg/archive.py

@@ -232,7 +232,7 @@ class Archive:
     def __init__(self, repository, key, manifest, name, cache=None, create=False,
                  checkpoint_interval=300, numeric_owner=False, progress=False,
                  chunker_params=CHUNKER_PARAMS, start=None, end=None, compression=None, compression_files=None,
-                 consider_checkpoint_files=False):
+                 consider_part_files=False):
         self.cwd = os.getcwd()
         self.key = key
         self.repository = repository
@@ -251,7 +251,7 @@ class Archive:
         if end is None:
             end = datetime.utcnow()
         self.end = end
-        self.consider_checkpoint_files = consider_checkpoint_files
+        self.consider_part_files = consider_part_files
         self.pipeline = DownloadPipeline(self.repository, self.key)
         if create:
             self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
@@ -330,8 +330,8 @@ Number of files: {0.stats.nfiles}'''.format(
         return 'Archive(%r)' % self.name
 
     def item_filter(self, item, filter=None):
-        if not self.consider_checkpoint_files and 'checkpoint' in item:
-            # this is a checkpoint (partial) file, we usually don't want to consider it.
+        if not self.consider_part_files and 'part' in item:
+            # this is a part(ial) file, we usually don't want to consider it.
             return False
         return filter(item) if filter else True
 
@@ -724,13 +724,12 @@ Number of files: {0.stats.nfiles}'''.format(
             length = len(item.chunks)
             # the item should only have the *additional* chunks we processed after the last partial item:
             item.chunks = item.chunks[from_chunk:]
-            item.path += '.checkpoint_%d' % number
-            item.checkpoint = number
+            item.path += '.borg_part_%d' % number
+            item.part = number
             number += 1
             self.add_item(item, show_progress=False)
             self.write_checkpoint()
-            # we have saved the checkpoint file, but we will reference the same
-            # chunks also from the final, complete file:
+            # we have saved the part file, but we will reference the same chunks also from the final, complete file:
             for chunk in item.chunks:
                 cache.chunk_incref(chunk.id, stats)
             return length, number
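
As an illustration of the checkpointing logic above (a toy sketch, not borg's actual data structures): each part item keeps only the chunks added since the previous checkpoint, while the final item references the full chunk list again, which is why the saved chunks get an extra reference count.

    # Toy illustration only; chunk ids and checkpoint positions are made up.
    chunks = ['c0', 'c1', 'c2', 'c3', 'c4']             # chunk ids of the big file seen so far
    parts, from_chunk = [], 0
    for number, upto in enumerate([2, 5], start=1):     # checkpoints after 2 and after 5 chunks
        parts.append({'path': 'bigfile.borg_part_%d' % number,
                      'part': number,
                      'chunks': chunks[from_chunk:upto]})   # only the *additional* chunks
        from_chunk = upto
    final_item = {'path': 'bigfile', 'chunks': chunks}      # the complete file references all chunks again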

+ 7 - 7
src/borg/archiver.py

@@ -101,7 +101,7 @@ def with_archive(method):
     def wrapper(self, args, repository, key, manifest, **kwargs):
         archive = Archive(repository, key, manifest, args.location.archive,
                           numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'),
-                          consider_checkpoint_files=args.consider_checkpoint_files)
+                          consider_part_files=args.consider_part_files)
         return method(self, args, repository=repository, manifest=manifest, key=key, archive=archive, **kwargs)
     return wrapper
 
@@ -670,7 +670,7 @@ class Archiver:
 
         archive1 = archive
         archive2 = Archive(repository, key, manifest, args.archive2,
-                           consider_checkpoint_files=args.consider_checkpoint_files)
+                           consider_part_files=args.consider_part_files)
 
         can_compare_chunk_ids = archive1.metadata.get(b'chunker_params', False) == archive2.metadata.get(
             b'chunker_params', True) or args.same_chunker_params
@@ -756,7 +756,7 @@ class Archiver:
         with cache_if_remote(repository) as cached_repo:
             if args.location.archive:
                 archive = Archive(repository, key, manifest, args.location.archive,
-                                  consider_checkpoint_files=args.consider_checkpoint_files)
+                                  consider_part_files=args.consider_part_files)
             else:
                 archive = None
             operations = FuseOperations(key, repository, manifest, archive, cached_repo)
@@ -783,7 +783,7 @@ class Archiver:
             matcher, _ = self.build_matcher(args.excludes, args.paths)
             with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
                 archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
-                                  consider_checkpoint_files=args.consider_checkpoint_files)
+                                  consider_part_files=args.consider_part_files)
 
                 if args.format:
                     format = args.format
@@ -986,7 +986,7 @@ class Archiver:
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) archive items metadata (not: data)"""
         archive = Archive(repository, key, manifest, args.location.archive,
-                          consider_checkpoint_files=args.consider_checkpoint_files)
+                          consider_part_files=args.consider_part_files)
         for i, item_id in enumerate(archive.metadata[b'items']):
             _, data = key.decrypt(item_id, repository.get(item_id))
             filename = '%06d_%s.items' % (i, bin_to_hex(item_id))
@@ -1237,9 +1237,9 @@ class Archiver:
                                   help='set umask to M (local and remote, default: %(default)04o)')
         common_group.add_argument('--remote-path', dest='remote_path', metavar='PATH',
                                   help='set remote path to executable (default: "borg")')
-        common_group.add_argument('--consider-checkpoint-files', dest='consider_checkpoint_files',
+        common_group.add_argument('--consider-part-files', dest='consider_part_files',
                                   action='store_true', default=False,
-                                  help='treat checkpoint files like normal files (e.g. to list/extract them)')
+                                  help='treat part files like normal files (e.g. to list/extract them)')
 
         parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
         parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,

+ 1 - 1
src/borg/constants.py

@@ -2,7 +2,7 @@
 ITEM_KEYS = frozenset(['path', 'source', 'rdev', 'chunks', 'chunks_healthy', 'hardlink_master',
                        'mode', 'user', 'group', 'uid', 'gid', 'mtime', 'atime', 'ctime',
                        'xattrs', 'bsdflags', 'acl_nfs4', 'acl_access', 'acl_default', 'acl_extended',
-                       'checkpoint'])
+                       'part'])
 
 # this is the set of keys that are always present in items:
 REQUIRED_ITEM_KEYS = frozenset(['path', 'mtime', ])

+ 1 - 1
src/borg/item.py

@@ -155,7 +155,7 @@ class Item(PropDict):
     deleted = PropDict._make_property('deleted', bool)
     nlink = PropDict._make_property('nlink', int)
 
-    checkpoint = PropDict._make_property('checkpoint', int)
+    part = PropDict._make_property('part', int)
 
 
 class EncryptedKey(PropDict):
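
For context on the ``part = PropDict._make_property('part', int)`` line: ``_make_property`` builds a typed property backed by the item's underlying dict. A minimal sketch of that pattern (simplified, not borg's actual implementation) could look like this:

    class PropDict:
        def __init__(self, **kw):
            self._dict = dict(kw)

        @classmethod
        def _make_property(cls, key, value_type):
            # simplified sketch: typed attribute stored under `key` in self._dict
            def _get(self):
                try:
                    return self._dict[key]
                except KeyError:
                    raise AttributeError(key) from None
            def _set(self, value):
                if not isinstance(value, value_type):
                    raise TypeError('%s must be %s' % (key, value_type.__name__))
                self._dict[key] = value
            return property(_get, _set)

    class Item(PropDict):
        path = PropDict._make_property('path', str)
        part = PropDict._make_property('part', int)

    item = Item(path='bigfile.borg_part_1', part=1)   # hypothetical usage
    assert item.part == 1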