from datetime import datetime, timedelta, timezone
from getpass import getuser
from itertools import groupby
import errno
import shutil
import tempfile
from attic.key import key_factory
import msgpack
import os
import socket
import stat
import sys
import time
from io import BytesIO
from attic import xattr
from attic.chunker import chunkify
from attic.hashindex import ChunkIndex
from attic.helpers import Error, uid2user, user2uid, gid2group, group2gid, \
    Manifest, Statistics, decode_dict, st_mtime_ns, make_path_safe, StableDict

ITEMS_BUFFER = 1024 * 1024

# Content-defined chunking parameters, passed to attic.chunker.chunkify():
# rolling hash window size, chunk boundary mask and minimum chunk size
CHUNK_MIN = 1024
WINDOW_SIZE = 0xfff
CHUNK_MASK = 0xffff

# Feature detection for the running platform / Python version
utime_supports_fd = os.utime in getattr(os, 'supports_fd', {})
has_mtime_ns = sys.version >= '3.3'
has_lchmod = hasattr(os, 'lchmod')


class DownloadPipeline:

    def __init__(self, repository, key):
        self.repository = repository
        self.key = key

    def unpack_many(self, ids, filter=None, preload=False):
        unpacker = msgpack.Unpacker(use_list=False)
        for data in self.fetch_many(ids):
            unpacker.feed(data)
            items = [decode_dict(item, (b'path', b'source', b'user', b'group')) for item in unpacker]
            if filter:
                items = [item for item in items if filter(item)]
            if preload:
                # Queue up the data chunks of all matching items so that later
                # fetch_many(..., is_preloaded=True) calls do not stall
                for item in items:
                    if b'chunks' in item:
                        self.repository.preload([c[0] for c in item[b'chunks']])
            for item in items:
                yield item

    def fetch_many(self, ids, is_preloaded=False):
        for id_, data in zip(ids, self.repository.get_many(ids, is_preloaded=is_preloaded)):
            yield self.key.decrypt(id_, data)
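
# A minimal usage sketch (hypothetical `repository`/`key` objects wired up
# elsewhere; this is how Archive.iter_items() below consumes item metadata):
#
#   pipeline = DownloadPipeline(repository, key)
#   for item in pipeline.unpack_many(item_chunk_ids, preload=True):
#       handle(item[b'path'], item.get(b'chunks', ()))   # hypothetical handler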


class ChunkBuffer:
    BUFFER_SIZE = 1 * 1024 * 1024

    def __init__(self, key):
        self.buffer = BytesIO()
        self.packer = msgpack.Packer(unicode_errors='surrogateescape')
        self.chunks = []
        self.key = key

    def add(self, item):
        self.buffer.write(self.packer.pack(StableDict(item)))
        if self.is_full():
            self.flush()

    def write_chunk(self, chunk):
        raise NotImplementedError

    def flush(self, flush=False):
        if self.buffer.tell() == 0:
            return
        self.buffer.seek(0)
        chunks = list(bytes(s) for s in chunkify(self.buffer, WINDOW_SIZE, CHUNK_MASK, CHUNK_MIN, self.key.chunk_seed))
        self.buffer.seek(0)
        self.buffer.truncate(0)
        # Leave the last partial chunk in the buffer unless flush is True
        end = None if flush or len(chunks) == 1 else -1
        for chunk in chunks[:end]:
            self.chunks.append(self.write_chunk(chunk))
        if end == -1:
            self.buffer.write(chunks[-1])

    def is_full(self):
        return self.buffer.tell() > self.BUFFER_SIZE


class CacheChunkBuffer(ChunkBuffer):

    def __init__(self, cache, key, stats):
        super(CacheChunkBuffer, self).__init__(key)
        self.cache = cache
        self.stats = stats

    def write_chunk(self, chunk):
        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats)
        return id_
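
# CacheChunkBuffer is the write path used when creating archives: item dicts
# are msgpack-packed into the buffer, chunkified, and each chunk stored
# (deduplicated) through the cache. ArchiveChecker below reuses ChunkBuffer
# directly by swapping in its own write_chunk callback.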


class Archive:

    class DoesNotExist(Error):
        """Archive {} does not exist"""

    class AlreadyExists(Error):
        """Archive {} already exists"""

    def __init__(self, repository, key, manifest, name, cache=None, create=False,
                 checkpoint_interval=300, numeric_owner=False):
        self.cwd = os.getcwd()
        self.key = key
        self.repository = repository
        self.cache = cache
        self.manifest = manifest
        self.hard_links = {}
        self.stats = Statistics()
        self.name = name
        self.checkpoint_interval = checkpoint_interval
        self.numeric_owner = numeric_owner
        self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
        self.pipeline = DownloadPipeline(self.repository, self.key)
        if create:
            if name in manifest.archives:
                raise self.AlreadyExists(name)
            self.last_checkpoint = time.time()
            i = 0
            while True:
                # Pick an unused checkpoint name: "name.checkpoint",
                # "name.checkpoint.1", "name.checkpoint.2", ...
                self.checkpoint_name = '%s.checkpoint%s' % (name, i and ('.%d' % i) or '')
                if self.checkpoint_name not in manifest.archives:
                    break
                i += 1
        else:
            if name not in self.manifest.archives:
                raise self.DoesNotExist(name)
            info = self.manifest.archives[name]
            self.load(info[b'id'])

    def load(self, id):
        self.id = id
        data = self.key.decrypt(self.id, self.repository.get(self.id))
        self.metadata = msgpack.unpackb(data)
        if self.metadata[b'version'] != 1:
            raise Exception('Unknown archive metadata version')
        decode_dict(self.metadata, (b'name', b'hostname', b'username', b'time'))
        self.metadata[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in self.metadata[b'cmdline']]
        self.name = self.metadata[b'name']

    @property
    def ts(self):
        """Timestamp of archive creation in UTC"""
        t, f = self.metadata[b'time'].split('.', 1)
        return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) + timedelta(seconds=float('.' + f))

    def __repr__(self):
        return 'Archive(%r)' % self.name

    def iter_items(self, filter=None, preload=False):
        for item in self.pipeline.unpack_many(self.metadata[b'items'], filter=filter, preload=preload):
            yield item

    def add_item(self, item):
        self.items_buffer.add(item)
        if time.time() - self.last_checkpoint > self.checkpoint_interval:
            self.write_checkpoint()
            self.last_checkpoint = time.time()

    def write_checkpoint(self):
        # Save a checkpoint archive so the data written so far survives a
        # crash, then drop it from the in-memory manifest again; it will be
        # superseded by the next checkpoint or the final save()
        self.save(self.checkpoint_name)
        del self.manifest.archives[self.checkpoint_name]
        self.cache.chunk_decref(self.id)

    def save(self, name=None):
        name = name or self.name
        if name in self.manifest.archives:
            raise self.AlreadyExists(name)
        self.items_buffer.flush(flush=True)
        metadata = StableDict({
            'version': 1,
            'name': name,
            'items': self.items_buffer.chunks,
            'cmdline': sys.argv,
            'hostname': socket.gethostname(),
            'username': getuser(),
            'time': datetime.utcnow().isoformat(),
        })
        data = msgpack.packb(metadata, unicode_errors='surrogateescape')
        self.id = self.key.id_hash(data)
        self.cache.add_chunk(self.id, data, self.stats)
        self.manifest.archives[name] = {'id': self.id, 'time': metadata['time']}
        self.manifest.write()
        self.repository.commit()
        self.cache.commit()

    def calc_stats(self, cache):
        def add(id):
            count, size, csize = self.cache.chunks[id]
            stats.update(size, csize, count == 1)
            self.cache.chunks[id] = count - 1, size, csize

        def add_file_chunks(chunks):
            for id, _, _ in chunks:
                add(id)

        # This function is a bit evil since it abuses the cache to calculate
        # the stats. The cache transaction must be rolled back afterwards
        unpacker = msgpack.Unpacker(use_list=False)
        cache.begin_txn()
        stats = Statistics()
        add(self.id)
        for id, chunk in zip(self.metadata[b'items'], self.repository.get_many(self.metadata[b'items'])):
            add(id)
            unpacker.feed(self.key.decrypt(id, chunk))
            for item in unpacker:
                if b'chunks' in item:
                    stats.nfiles += 1
                    add_file_chunks(item[b'chunks'])
        cache.rollback()
        return stats

    def extract_item(self, item, restore_attrs=True, dry_run=False):
        if dry_run:
            if b'chunks' in item:
                for _ in self.pipeline.fetch_many([c[0] for c in item[b'chunks']], is_preloaded=True):
                    pass
            return
        dest = self.cwd
        if item[b'path'].startswith('/') or item[b'path'].startswith('..'):
            raise Exception('Path should be relative and local')
        path = os.path.join(dest, item[b'path'])
        # Attempt to remove an existing file or directory at the target path,
        # ignoring errors
        try:
            st = os.lstat(path)
            if stat.S_ISDIR(st.st_mode):
                os.rmdir(path)
            else:
                os.unlink(path)
        except OSError:
            pass
        mode = item[b'mode']
        if stat.S_ISDIR(mode):
            if not os.path.exists(path):
                os.makedirs(path)
            if restore_attrs:
                self.restore_attrs(path, item)
        elif stat.S_ISREG(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            # Hard link?
            if b'source' in item:
                source = os.path.join(dest, item[b'source'])
                if os.path.exists(path):
                    os.unlink(path)
                os.link(source, path)
            else:
                with open(path, 'wb') as fd:
                    ids = [c[0] for c in item[b'chunks']]
                    for data in self.pipeline.fetch_many(ids, is_preloaded=True):
                        fd.write(data)
                    fd.flush()
                    self.restore_attrs(path, item, fd=fd.fileno())
        elif stat.S_ISFIFO(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            os.mkfifo(path)
            self.restore_attrs(path, item)
        elif stat.S_ISLNK(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            source = item[b'source']
            if os.path.exists(path):
                os.unlink(path)
            os.symlink(source, path)
            self.restore_attrs(path, item, symlink=True)
        elif stat.S_ISCHR(mode) or stat.S_ISBLK(mode):
            os.mknod(path, item[b'mode'], item[b'rdev'])
            self.restore_attrs(path, item)
        else:
            raise Exception('Unknown archive item type %r' % item[b'mode'])

    def restore_attrs(self, path, item, symlink=False, fd=None):
        xattrs = item.get(b'xattrs')
        if xattrs:
            for k, v in xattrs.items():
                try:
                    xattr.setxattr(fd or path, k, v)
                except OSError as e:
                    if e.errno != errno.ENOTSUP:
                        raise
        uid = gid = None
        if not self.numeric_owner:
            uid = user2uid(item[b'user'])
            gid = group2gid(item[b'group'])
        uid = uid or item[b'uid']
        gid = gid or item[b'gid']
        # This code is a bit of a mess due to OS-specific differences
        try:
            if fd:
                os.fchown(fd, uid, gid)
            else:
                os.lchown(path, uid, gid)
        except OSError:
            pass
        if fd:
            os.fchmod(fd, item[b'mode'])
        elif not symlink:
            os.chmod(path, item[b'mode'])
        elif has_lchmod:  # Not available on Linux
            os.lchmod(path, item[b'mode'])
        if fd and utime_supports_fd:  # Python >= 3.3
            os.utime(fd, None, ns=(item[b'mtime'], item[b'mtime']))
        elif utime_supports_fd:  # Python >= 3.3
            os.utime(path, None, ns=(item[b'mtime'], item[b'mtime']), follow_symlinks=False)
        elif not symlink:
            os.utime(path, (item[b'mtime'] / 10**9, item[b'mtime'] / 10**9))

    def delete(self, cache):
        unpacker = msgpack.Unpacker(use_list=False)
        for id_, data in zip(self.metadata[b'items'], self.repository.get_many(self.metadata[b'items'])):
            unpacker.feed(self.key.decrypt(id_, data))
            self.cache.chunk_decref(id_)
            for item in unpacker:
                if b'chunks' in item:
                    for chunk_id, size, csize in item[b'chunks']:
                        self.cache.chunk_decref(chunk_id)
        self.cache.chunk_decref(self.id)
        del self.manifest.archives[self.name]
        self.manifest.write()
        self.repository.commit()
        cache.commit()

    def stat_attrs(self, st, path):
        item = {
            b'mode': st.st_mode,
            b'uid': st.st_uid, b'user': uid2user(st.st_uid),
            b'gid': st.st_gid, b'group': gid2group(st.st_gid),
            b'mtime': st_mtime_ns(st),
        }
        if self.numeric_owner:
            item[b'user'] = item[b'group'] = None
        xattrs = xattr.get_all(path, follow_symlinks=False)
        if xattrs:
            item[b'xattrs'] = StableDict(xattrs)
        return item

    def process_item(self, path, st):
        item = {b'path': make_path_safe(path)}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_dev(self, path, st):
        item = {b'path': make_path_safe(path), b'rdev': st.st_rdev}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_symlink(self, path, st):
        source = os.readlink(path)
        item = {b'path': make_path_safe(path), b'source': source}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_file(self, path, st, cache):
        safe_path = make_path_safe(path)
        # Is it a hard link?
        if st.st_nlink > 1:
            source = self.hard_links.get((st.st_ino, st.st_dev))
            if (st.st_ino, st.st_dev) in self.hard_links:
                item = self.stat_attrs(st, path)
                item.update({b'path': safe_path, b'source': source})
                self.add_item(item)
                return
            else:
                self.hard_links[st.st_ino, st.st_dev] = safe_path
        path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
        ids = cache.file_known_and_unchanged(path_hash, st)
        chunks = None
        if ids is not None:
            # Make sure all ids are available
            for id_ in ids:
                if not cache.seen_chunk(id_):
                    break
            else:
                # (for/else: only reached when all chunk ids were seen)
                chunks = [cache.chunk_incref(id_, self.stats) for id_ in ids]
        # Only chunkify the file if needed
        if chunks is None:
            with open(path, 'rb') as fd:
                chunks = []
                for chunk in chunkify(fd, WINDOW_SIZE, CHUNK_MASK, CHUNK_MIN, self.key.chunk_seed):
                    chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
            cache.memorize_file(path_hash, st, [c[0] for c in chunks])
        item = {b'path': safe_path, b'chunks': chunks}
        item.update(self.stat_attrs(st, path))
        self.stats.nfiles += 1
        self.add_item(item)

    @staticmethod
    def list_archives(repository, key, manifest, cache=None):
        for name, info in manifest.archives.items():
            yield Archive(repository, key, manifest, name, cache=cache)
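
# A hedged end-to-end sketch of the Archive API (repository, key, manifest
# and cache construction omitted; walk_filesystem() is a hypothetical helper,
# the method names and signatures follow the class above):
#
#   archive = Archive(repository, key, manifest, 'docs-2014-01-01',
#                     cache=cache, create=True)
#   for path, st in walk_filesystem():
#       if stat.S_ISREG(st.st_mode):
#           archive.process_file(path, st, cache)
#       elif stat.S_ISLNK(st.st_mode):
#           archive.process_symlink(path, st)
#       else:
#           archive.process_item(path, st)
#   archive.save()
#
#   # Extraction: preload data chunks, then materialize each item on disk
#   archive = Archive(repository, key, manifest, 'docs-2014-01-01')
#   for item in archive.iter_items(preload=True):
#       archive.extract_item(item)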


class RobustUnpacker:
    """A restartable/robust version of the streaming msgpack unpacker
    """
    item_keys = [msgpack.packb(name) for name in ('path', 'mode', 'source', 'chunks', 'rdev', 'xattrs', 'user', 'group', 'uid', 'gid', 'mtime')]

    def __init__(self, validator):
        super(RobustUnpacker, self).__init__()
        self.validator = validator
        self._buffered_data = []
        self._resync = False
        self._unpacker = msgpack.Unpacker(object_hook=StableDict)

    def resync(self):
        self._buffered_data = []
        self._resync = True

    def feed(self, data):
        if self._resync:
            self._buffered_data.append(data)
        else:
            self._unpacker.feed(data)

    def __iter__(self):
        return self

    def __next__(self):
        if self._resync:
            data = b''.join(self._buffered_data)
            while self._resync:
                if not data:
                    raise StopIteration
                # Abort early if the data does not look like a serialized dict
                # (msgpack fixmap byte followed by a fixstr key)
                if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
                    data = data[1:]
                    continue
                # Make sure it looks like an item dict
                for pattern in self.item_keys:
                    if data[1:].startswith(pattern):
                        break
                else:
                    data = data[1:]
                    continue
                self._unpacker = msgpack.Unpacker(object_hook=StableDict)
                self._unpacker.feed(data)
                try:
                    item = next(self._unpacker)
                    if self.validator(item):
                        self._resync = False
                        return item
                # Ignore exceptions that might be raised when feeding
                # msgpack with invalid data
                except (TypeError, ValueError, StopIteration):
                    pass
                data = data[1:]
        else:
            return next(self._unpacker)
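
# Sketch of the resync protocol this class adds over msgpack.Unpacker: after
# a gap in the item stream, feed() buffers raw bytes and __next__() scans
# byte-by-byte for something that validates as an item dict.
#
#   unpacker = RobustUnpacker(lambda item: isinstance(item, dict) and b'path' in item)
#   unpacker.feed(first_chunk)        # normal streaming
#   items = list(unpacker)
#   unpacker.resync()                 # an item chunk was missing
#   unpacker.feed(next_good_chunk)    # scanning resumes at the next valid item
#   items += list(unpacker)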


class ArchiveChecker:

    def __init__(self):
        self.error_found = False
        self.possibly_superseded = set()
        self.tmpdir = tempfile.mkdtemp()

    def __del__(self):
        shutil.rmtree(self.tmpdir)

    def check(self, repository, repair=False):
        self.report_progress('Starting archive consistency check...')
        self.repair = repair
        self.repository = repository
        self.init_chunks()
        self.key = self.identify_key(repository)
        if Manifest.MANIFEST_ID not in self.chunks:
            self.manifest = self.rebuild_manifest()
        else:
            self.manifest, _ = Manifest.load(repository, key=self.key)
        self.rebuild_refcounts()
        self.verify_chunks()
        if not self.error_found:
            self.report_progress('Archive consistency check complete, no problems found.')
        return self.repair or not self.error_found

    def init_chunks(self):
        """Fetch a list of all object keys from repository
        """
        # Explicitly set the initial hash table capacity to avoid performance issues
        # due to hash table "resonance"
        capacity = int(len(self.repository) * 1.2)
        self.chunks = ChunkIndex.create(os.path.join(self.tmpdir, 'chunks').encode('utf-8'), capacity=capacity)
        marker = None
        while True:
            result = self.repository.list(limit=10000, marker=marker)
            if not result:
                break
            marker = result[-1]
            for id_ in result:
                self.chunks[id_] = (0, 0, 0)

    def report_progress(self, msg, error=False):
        if error:
            self.error_found = True
        print(msg, file=sys.stderr if error else sys.stdout)

    def identify_key(self, repository):
        cdata = repository.get(next(self.chunks.iteritems())[0])
        return key_factory(repository, cdata)

    def rebuild_manifest(self):
        """Rebuild the manifest object if it is missing

        Iterates through all objects in the repository looking for archive metadata blocks.
        """
        self.report_progress('Rebuilding missing manifest, this might take some time...', error=True)
        manifest = Manifest(self.key, self.repository)
        for chunk_id, _ in self.chunks.iteritems():
            cdata = self.repository.get(chunk_id)
            data = self.key.decrypt(chunk_id, cdata)
            # Some basic sanity checks of the payload before feeding it into msgpack
            if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
                continue
            if b'cmdline' not in data or b'\xa7version\x01' not in data:
                continue
            try:
                archive = msgpack.unpackb(data)
            # Ignore exceptions that might be raised when feeding
            # msgpack with invalid data
            except Exception:
                continue
            if isinstance(archive, dict) and b'items' in archive and b'cmdline' in archive:
                self.report_progress('Found archive ' + archive[b'name'].decode('utf-8'), error=True)
                manifest.archives[archive[b'name'].decode('utf-8')] = {b'id': chunk_id, b'time': archive[b'time']}
        self.report_progress('Manifest rebuild complete', error=True)
        return manifest

    def rebuild_refcounts(self):
        """Rebuild object reference counts by walking the metadata

        Missing and/or incorrect data is repaired when detected
        """
        # Exclude the manifest from chunks
        del self.chunks[Manifest.MANIFEST_ID]

        def mark_as_possibly_superseded(id_):
            if self.chunks.get(id_, (0,))[0] == 0:
                self.possibly_superseded.add(id_)

        def add_callback(chunk):
            id_ = self.key.id_hash(chunk)
            cdata = self.key.encrypt(chunk)
            add_reference(id_, len(chunk), len(cdata), cdata)
            return id_

        def add_reference(id_, size, csize, cdata=None):
            try:
                count, _, _ = self.chunks[id_]
                self.chunks[id_] = count + 1, size, csize
            except KeyError:
                assert cdata is not None
                self.chunks[id_] = 1, size, csize
                if self.repair:
                    self.repository.put(id_, cdata)

        def verify_file_chunks(item):
            """Verifies that all file chunks are present

            Missing file chunks will be replaced with new chunks of the same
            length containing all zeros.
            """
            offset = 0
            chunk_list = []
            for chunk_id, size, csize in item[b'chunks']:
                if chunk_id not in self.chunks:
                    # If a file chunk is missing, create an all empty replacement chunk
                    self.report_progress('{}: Missing file chunk detected (Byte {}-{})'.format(item[b'path'].decode('utf-8', 'surrogateescape'), offset, offset + size), error=True)
                    data = bytes(size)
                    chunk_id = self.key.id_hash(data)
                    cdata = self.key.encrypt(data)
                    csize = len(cdata)
                    add_reference(chunk_id, size, csize, cdata)
                else:
                    add_reference(chunk_id, size, csize)
                chunk_list.append((chunk_id, size, csize))
                offset += size
            item[b'chunks'] = chunk_list

        def robust_iterator(archive):
            """Iterates through all archive items

            Missing item chunks will be skipped and the msgpack stream will be restarted
            """
            unpacker = RobustUnpacker(lambda item: isinstance(item, dict) and b'path' in item)
            _state = 0

            def missing_chunk_detector(chunk_id):
                # Increment _state each time the stream flips between
                # "chunks present" (even) and "chunks missing" (odd)
                nonlocal _state
                if _state % 2 != int(chunk_id not in self.chunks):
                    _state += 1
                return _state

            for state, items in groupby(archive[b'items'], missing_chunk_detector):
                items = list(items)
                if state % 2:
                    self.report_progress('Archive metadata damage detected', error=True)
                    continue
                if state > 0:
                    unpacker.resync()
                for chunk_id, cdata in zip(items, self.repository.get_many(items)):
                    unpacker.feed(self.key.decrypt(chunk_id, cdata))
                    for item in unpacker:
                        yield item

        num_archives = len(self.manifest.archives)
        for i, (name, info) in enumerate(list(self.manifest.archives.items()), 1):
            self.report_progress('Analyzing archive {} ({}/{})'.format(name, i, num_archives))
            archive_id = info[b'id']
            if archive_id not in self.chunks:
                self.report_progress('Archive metadata block is missing', error=True)
                del self.manifest.archives[name]
                continue
            mark_as_possibly_superseded(archive_id)
            cdata = self.repository.get(archive_id)
            data = self.key.decrypt(archive_id, cdata)
            archive = StableDict(msgpack.unpackb(data))
            if archive[b'version'] != 1:
                raise Exception('Unknown archive metadata version')
            decode_dict(archive, (b'name', b'hostname', b'username', b'time'))  # fixme: argv
            items_buffer = ChunkBuffer(self.key)
            items_buffer.write_chunk = add_callback
            for item in robust_iterator(archive):
                if b'chunks' in item:
                    verify_file_chunks(item)
                items_buffer.add(item)
            items_buffer.flush(flush=True)
            for previous_item_id in archive[b'items']:
                mark_as_possibly_superseded(previous_item_id)
            archive[b'items'] = items_buffer.chunks
            data = msgpack.packb(archive, unicode_errors='surrogateescape')
            new_archive_id = self.key.id_hash(data)
            cdata = self.key.encrypt(data)
            add_reference(new_archive_id, len(data), len(cdata), cdata)
            info[b'id'] = new_archive_id

    def verify_chunks(self):
        unused = set()
        for id_, (count, size, csize) in self.chunks.iteritems():
            if count == 0:
                unused.add(id_)
        orphaned = unused - self.possibly_superseded
        if orphaned:
            self.report_progress('{} orphaned objects found'.format(len(orphaned)), error=True)
        if self.repair:
            for id_ in unused:
                self.repository.delete(id_)
        self.manifest.write()
        self.repository.commit()
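
# Typical driver code, roughly what an `attic check` style command would run
# (repository opening and argument parsing omitted; `args` is hypothetical):
#
#   checker = ArchiveChecker()
#   ok = checker.check(repository, repair=args.repair)
#   if not ok:
#       sys.exit(1)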