@@ -24,6 +24,8 @@ from .helpers import Error, uid2user, user2uid, gid2group, group2gid, \
 from .platform import acl_get, acl_set
 from .chunker import Chunker
 from .hashindex import ChunkIndex
+from .repository import Repository
+
 import msgpack
 
 ITEMS_BUFFER = 1024 * 1024
@@ -494,23 +496,61 @@ Number of files: {0.stats.nfiles}'''.format(
         self.cache.chunk_decref(self.id, self.stats)
         del self.manifest.archives[self.name]
 
-    def delete(self, stats, progress=False):
-        unpacker = msgpack.Unpacker(use_list=False)
-        items_ids = self.metadata[b'items']
-        pi = ProgressIndicatorPercent(total=len(items_ids), msg="Decrementing references %3.0f%%", same_line=True)
-        for (i, (items_id, data)) in enumerate(zip(items_ids, self.repository.get_many(items_ids))):
+    def delete(self, stats, progress=False, forced=False):
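+        # forced=True: do not abort on missing or corrupted data, delete as much as possible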
+        class ChunksIndexError(Error):
+            """Chunk ID {} missing from chunks index, corrupted chunks index - aborting transaction."""
+
+        def chunk_decref(id, stats):
+            nonlocal error
+            try:
+                self.cache.chunk_decref(id, stats)
+            except KeyError:
+                cid = hexlify(id).decode('ascii')
+                raise ChunksIndexError(cid)
+            except Repository.ObjectNotFound as e:
+                # object not in repo - strange, but we wanted to delete it anyway.
+                if not forced:
+                    raise
+                error = True
+
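+        # set if forced deletion had to skip over missing or corrupted data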
+        error = False
+        try:
+            unpacker = msgpack.Unpacker(use_list=False)
+            items_ids = self.metadata[b'items']
+            pi = ProgressIndicatorPercent(total=len(items_ids), msg="Decrementing references %3.0f%%", same_line=True)
+            for (i, (items_id, data)) in enumerate(zip(items_ids, self.repository.get_many(items_ids))):
+                if progress:
+                    pi.show(i)
+                unpacker.feed(self.key.decrypt(items_id, data))
+                chunk_decref(items_id, stats)
+                try:
+                    for item in unpacker:
+                        if b'chunks' in item:
+                            for chunk_id, size, csize in item[b'chunks']:
+                                chunk_decref(chunk_id, stats)
+                except (TypeError, ValueError):
+                    # if items metadata spans multiple chunks and one chunk got dropped somehow,
+                    # it could be that unpacker yields bad types
+                    if not forced:
+                        raise
+                    error = True
             if progress:
-                pi.show(i)
-            unpacker.feed(self.key.decrypt(items_id, data))
-            self.cache.chunk_decref(items_id, stats)
-            for item in unpacker:
-                if b'chunks' in item:
-                    for chunk_id, size, csize in item[b'chunks']:
-                        self.cache.chunk_decref(chunk_id, stats)
-        if progress:
-            pi.finish()
-        self.cache.chunk_decref(self.id, stats)
+                pi.finish()
+        except (msgpack.UnpackException, Repository.ObjectNotFound):
+            # items metadata corrupted
+            if not forced:
+                raise
+            error = True
+        # in forced delete mode, we try hard to delete at least the manifest entry,
+        # if possible also the archive superblock, even if processing the items raises
+        # some harmless exception.
+        chunk_decref(self.id, stats)
         del self.manifest.archives[self.name]
+        if error:
+            logger.warning('forced deletion succeeded, but the deleted archive was corrupted.')
+            logger.warning('borg check --repair is required to free all space.')
 
     def stat_attrs(self, st, path):
         item = {