@@ -100,7 +100,8 @@ def with_archive(method):
     @functools.wraps(method)
     def wrapper(self, args, repository, key, manifest, **kwargs):
         archive = Archive(repository, key, manifest, args.location.archive,
-                          numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'))
+                          numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'),
+                          consider_checkpoint_files=args.consider_checkpoint_files)
         return method(self, args, repository=repository, manifest=manifest, key=key, archive=archive, **kwargs)
     return wrapper
 
@@ -668,7 +669,8 @@ class Archiver:
             print_output(line)
 
         archive1 = archive
-        archive2 = Archive(repository, key, manifest, args.archive2)
+        archive2 = Archive(repository, key, manifest, args.archive2,
+                           consider_checkpoint_files=args.consider_checkpoint_files)
 
         can_compare_chunk_ids = archive1.metadata.get(b'chunker_params', False) == archive2.metadata.get(
             b'chunker_params', True) or args.same_chunker_params
@@ -753,7 +755,8 @@ class Archiver:
 
         with cache_if_remote(repository) as cached_repo:
             if args.location.archive:
-                archive = Archive(repository, key, manifest, args.location.archive)
+                archive = Archive(repository, key, manifest, args.location.archive,
+                                  consider_checkpoint_files=args.consider_checkpoint_files)
             else:
                 archive = None
             operations = FuseOperations(key, repository, manifest, archive, cached_repo)
@@ -779,7 +782,8 @@ class Archiver:
         if args.location.archive:
             matcher, _ = self.build_matcher(args.excludes, args.paths)
             with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
-                archive = Archive(repository, key, manifest, args.location.archive, cache=cache)
+                archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
+                                  consider_checkpoint_files=args.consider_checkpoint_files)
 
                 if args.format:
                     format = args.format
@@ -981,7 +985,8 @@ class Archiver:
     @with_repository()
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) archive items metadata (not: data)"""
-        archive = Archive(repository, key, manifest, args.location.archive)
+        archive = Archive(repository, key, manifest, args.location.archive,
+                          consider_checkpoint_files=args.consider_checkpoint_files)
         for i, item_id in enumerate(archive.metadata[b'items']):
             _, data = key.decrypt(item_id, repository.get(item_id))
             filename = '%06d_%s.items' % (i, bin_to_hex(item_id))
@@ -1232,6 +1237,9 @@ class Archiver:
                                   help='set umask to M (local and remote, default: %(default)04o)')
         common_group.add_argument('--remote-path', dest='remote_path', metavar='PATH',
                                   help='set remote path to executable (default: "borg")')
+        common_group.add_argument('--consider-checkpoint-files', dest='consider_checkpoint_files',
+                                  action='store_true', default=False,
+                                  help='treat checkpoint files like normal files (e.g. to list/extract them)')
 
         parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
         parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,
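
Note: the hunks above only register the --consider-checkpoint-files option and thread it through the Archive(...) call sites in archiver.py; the matching change inside Archive itself is not part of this diff. A minimal sketch of what the consuming side could look like, assuming a hypothetical b'checkpoint' item key marks checkpoint files (the real attribute name and filtering location may differ):

# Sketch only, not borg's actual Archive implementation: shows one way the
# constructor argument added above could be consumed.  The b'checkpoint'
# item key used for filtering is a hypothetical marker.
class Archive:
    def __init__(self, repository, key, manifest, name,
                 numeric_owner=False, cache=None,
                 consider_checkpoint_files=False):
        self.repository = repository
        self.key = key
        self.manifest = manifest
        self.name = name
        self.numeric_owner = numeric_owner
        self.cache = cache
        self.consider_checkpoint_files = consider_checkpoint_files

    def filter_items(self, items):
        # Drop checkpoint files from list/extract/diff output unless the user
        # passed --consider-checkpoint-files.
        for item in items:
            if item.get(b'checkpoint') and not self.consider_checkpoint_files:
                continue
            yield item

With the argparse addition above, a command such as "borg list --consider-checkpoint-files repo::archive" would then include checkpoint files in the listing instead of hiding them.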