from datetime import datetime, timedelta, timezone
from getpass import getuser
from itertools import groupby
import errno
import shutil
import tempfile
from attic.key import key_factory
from attic.remote import cache_if_remote
import msgpack
import os
import socket
import stat
import sys
import time
from io import BytesIO
from attic import xattr
from attic.chunker import chunkify
from attic.hashindex import ChunkIndex
from attic.helpers import Error, uid2user, user2uid, gid2group, group2gid, \
    Manifest, Statistics, decode_dict, st_mtime_ns, make_path_safe, StableDict

ITEMS_BUFFER = 1024 * 1024
CHUNK_MIN = 1024
WINDOW_SIZE = 0xfff
CHUNK_MASK = 0xffff

utime_supports_fd = os.utime in getattr(os, 'supports_fd', {})
has_mtime_ns = sys.version >= '3.3'
has_lchmod = hasattr(os, 'lchmod')
has_lchflags = hasattr(os, 'lchflags')


class DownloadPipeline:

    def __init__(self, repository, key):
        self.repository = repository
        self.key = key

    def unpack_many(self, ids, filter=None, preload=False):
        unpacker = msgpack.Unpacker(use_list=False)
        for data in self.fetch_many(ids):
            unpacker.feed(data)
            items = [decode_dict(item, (b'path', b'source', b'user', b'group')) for item in unpacker]
            if filter:
                items = [item for item in items if filter(item)]
            if preload:
                for item in items:
                    if b'chunks' in item:
                        self.repository.preload([c[0] for c in item[b'chunks']])
            for item in items:
                yield item

    def fetch_many(self, ids, is_preloaded=False):
        for id_, data in zip(ids, self.repository.get_many(ids, is_preloaded=is_preloaded)):
            yield self.key.decrypt(id_, data)
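
# Usage sketch (hypothetical setup; in attic itself the repository, key and
# item ids come from an Archive instance): iterate over the decrypted items
# of an archive while prefetching file chunks in the background.
#
#   pipeline = DownloadPipeline(repository, key)
#   for item in pipeline.unpack_many(item_ids, preload=True):
#       print(item[b'path'])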


class ChunkBuffer:
    BUFFER_SIZE = 1 * 1024 * 1024

    def __init__(self, key):
        self.buffer = BytesIO()
        self.packer = msgpack.Packer(unicode_errors='surrogateescape')
        self.chunks = []
        self.key = key

    def add(self, item):
        self.buffer.write(self.packer.pack(StableDict(item)))
        if self.is_full():
            self.flush()

    def write_chunk(self, chunk):
        raise NotImplementedError

    def flush(self, flush=False):
        if self.buffer.tell() == 0:
            return
        self.buffer.seek(0)
        chunks = list(bytes(s) for s in chunkify(self.buffer, WINDOW_SIZE, CHUNK_MASK, CHUNK_MIN, self.key.chunk_seed))
        self.buffer.seek(0)
        self.buffer.truncate(0)
        # Leave the last partial chunk in the buffer unless flush is True
        end = None if flush or len(chunks) == 1 else -1
        for chunk in chunks[:end]:
            self.chunks.append(self.write_chunk(chunk))
        if end == -1:
            self.buffer.write(chunks[-1])

    def is_full(self):
        return self.buffer.tell() > self.BUFFER_SIZE


class CacheChunkBuffer(ChunkBuffer):

    def __init__(self, cache, key, stats):
        super(CacheChunkBuffer, self).__init__(key)
        self.cache = cache
        self.stats = stats

    def write_chunk(self, chunk):
        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats)
        return id_
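
# Minimal subclass sketch (hypothetical, for illustration): ChunkBuffer leaves
# write_chunk() abstract, so a custom sink only needs to store each chunk and
# return its id. Unlike CacheChunkBuffer, this variant keeps chunks in a plain
# dict instead of the repository-backed cache.
#
#   class DictChunkBuffer(ChunkBuffer):
#       def __init__(self, key):
#           super().__init__(key)
#           self.store = {}
#
#       def write_chunk(self, chunk):
#           id_ = self.key.id_hash(chunk)
#           self.store[id_] = bytes(chunk)
#           return id_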


class Archive:

    class DoesNotExist(Error):
        """Archive {} does not exist"""

    class AlreadyExists(Error):
        """Archive {} already exists"""

    def __init__(self, repository, key, manifest, name, cache=None, create=False,
                 checkpoint_interval=300, numeric_owner=False):
        self.cwd = os.getcwd()
        self.key = key
        self.repository = repository
        self.cache = cache
        self.manifest = manifest
        self.hard_links = {}
        self.stats = Statistics()
        self.name = name
        self.checkpoint_interval = checkpoint_interval
        self.numeric_owner = numeric_owner
        self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
        self.pipeline = DownloadPipeline(self.repository, self.key)
        if create:
            if name in manifest.archives:
                raise self.AlreadyExists(name)
            self.last_checkpoint = time.time()
            i = 0
            while True:
                self.checkpoint_name = '%s.checkpoint%s' % (name, i and ('.%d' % i) or '')
                if self.checkpoint_name not in manifest.archives:
                    break
                i += 1
        else:
            if name not in self.manifest.archives:
                raise self.DoesNotExist(name)
            info = self.manifest.archives[name]
            self.load(info[b'id'])

    def load(self, id):
        self.id = id
        data = self.key.decrypt(self.id, self.repository.get(self.id))
        self.metadata = msgpack.unpackb(data)
        if self.metadata[b'version'] != 1:
            raise Exception('Unknown archive metadata version')
        decode_dict(self.metadata, (b'name', b'hostname', b'username', b'time'))
        self.metadata[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in self.metadata[b'cmdline']]
        self.name = self.metadata[b'name']

    @property
    def ts(self):
        """Timestamp of archive creation in UTC"""
        t, f = self.metadata[b'time'].split('.', 1)
        return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) + timedelta(seconds=float('.' + f))
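
    # Example: for metadata[b'time'] == '2014-01-01T12:00:00.123456' this
    # yields datetime(2014, 1, 1, 12, 0, 0, 123456, tzinfo=timezone.utc);
    # the fractional part is split off and re-added as a timedelta because
    # the '%Y-%m-%dT%H:%M:%S' format accepts only whole seconds.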

    def __repr__(self):
        return 'Archive(%r)' % self.name

    def iter_items(self, filter=None, preload=False):
        for item in self.pipeline.unpack_many(self.metadata[b'items'], filter=filter, preload=preload):
            yield item

    def add_item(self, item):
        self.items_buffer.add(item)
        if time.time() - self.last_checkpoint > self.checkpoint_interval:
            self.write_checkpoint()
            self.last_checkpoint = time.time()

    def write_checkpoint(self):
        self.save(self.checkpoint_name)
        del self.manifest.archives[self.checkpoint_name]
        self.cache.chunk_decref(self.id, self.stats)

    def save(self, name=None):
        name = name or self.name
        if name in self.manifest.archives:
            raise self.AlreadyExists(name)
        self.items_buffer.flush(flush=True)
        metadata = StableDict({
            'version': 1,
            'name': name,
            'items': self.items_buffer.chunks,
            'cmdline': sys.argv,
            'hostname': socket.gethostname(),
            'username': getuser(),
            'time': datetime.utcnow().isoformat(),
        })
        data = msgpack.packb(metadata, unicode_errors='surrogateescape')
        self.id = self.key.id_hash(data)
        self.cache.add_chunk(self.id, data, self.stats)
        self.manifest.archives[name] = {'id': self.id, 'time': metadata['time']}
        self.manifest.write()
        self.repository.commit()
        self.cache.commit()
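
    # End-to-end sketch (hypothetical setup code; in attic the repository,
    # key, manifest and cache are created by the command-line layer):
    #
    #   archive = Archive(repository, key, manifest, 'docs-2014-01-01',
    #                     cache=cache, create=True)
    #   st = os.lstat('docs/report.txt')
    #   archive.process_file('docs/report.txt', st, cache)
    #   archive.save()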

    def calc_stats(self, cache):
        def add(id):
            count, size, csize = self.cache.chunks[id]
            stats.update(size, csize, count == 1)
            self.cache.chunks[id] = count - 1, size, csize

        def add_file_chunks(chunks):
            for id, _, _ in chunks:
                add(id)

        # This function is a bit evil since it abuses the cache to calculate
        # the stats. The cache transaction must be rolled back afterwards.
        unpacker = msgpack.Unpacker(use_list=False)
        cache.begin_txn()
        stats = Statistics()
        add(self.id)
        for id, chunk in zip(self.metadata[b'items'], self.repository.get_many(self.metadata[b'items'])):
            add(id)
            unpacker.feed(self.key.decrypt(id, chunk))
            for item in unpacker:
                if b'chunks' in item:
                    stats.nfiles += 1
                    add_file_chunks(item[b'chunks'])
        cache.rollback()
        return stats

    def extract_item(self, item, restore_attrs=True, dry_run=False):
        if dry_run:
            if b'chunks' in item:
                for _ in self.pipeline.fetch_many([c[0] for c in item[b'chunks']], is_preloaded=True):
                    pass
            return
        dest = self.cwd
        if item[b'path'].startswith('/') or item[b'path'].startswith('..'):
            raise Exception('Path should be relative and local')
        path = os.path.join(dest, item[b'path'])
        # Attempt to remove existing files, ignoring errors
        try:
            st = os.lstat(path)
            if stat.S_ISDIR(st.st_mode):
                os.rmdir(path)
            else:
                os.unlink(path)
        except OSError:
            pass
        mode = item[b'mode']
        if stat.S_ISDIR(mode):
            if not os.path.exists(path):
                os.makedirs(path)
            if restore_attrs:
                self.restore_attrs(path, item)
        elif stat.S_ISREG(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            # Hard link?
            if b'source' in item:
                source = os.path.join(dest, item[b'source'])
                if os.path.exists(path):
                    os.unlink(path)
                os.link(source, path)
            else:
                with open(path, 'wb') as fd:
                    ids = [c[0] for c in item[b'chunks']]
                    for data in self.pipeline.fetch_many(ids, is_preloaded=True):
                        fd.write(data)
                    fd.flush()
                    self.restore_attrs(path, item, fd=fd.fileno())
        elif stat.S_ISFIFO(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            os.mkfifo(path)
            self.restore_attrs(path, item)
        elif stat.S_ISLNK(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            source = item[b'source']
            if os.path.exists(path):
                os.unlink(path)
            os.symlink(source, path)
            self.restore_attrs(path, item, symlink=True)
        elif stat.S_ISCHR(mode) or stat.S_ISBLK(mode):
            os.mknod(path, item[b'mode'], item[b'rdev'])
            self.restore_attrs(path, item)
        else:
            raise Exception('Unknown archive item type %r' % item[b'mode'])

    def restore_attrs(self, path, item, symlink=False, fd=None):
        xattrs = item.get(b'xattrs')
        if xattrs:
            for k, v in xattrs.items():
                try:
                    xattr.setxattr(fd or path, k, v, follow_symlinks=False)
                except OSError as e:
                    if e.errno != errno.ENOTSUP:
                        raise
        uid = gid = None
        if not self.numeric_owner:
            uid = user2uid(item[b'user'])
            gid = group2gid(item[b'group'])
        uid = uid or item[b'uid']
        gid = gid or item[b'gid']
        # This code is a bit of a mess due to OS-specific differences
        try:
            if fd:
                os.fchown(fd, uid, gid)
            else:
                os.lchown(path, uid, gid)
        except OSError:
            pass
        if fd:
            os.fchmod(fd, item[b'mode'])
        elif not symlink:
            os.chmod(path, item[b'mode'])
        elif has_lchmod:  # Not available on Linux
            os.lchmod(path, item[b'mode'])
        if fd and utime_supports_fd:  # Python >= 3.3
            os.utime(fd, None, ns=(item[b'mtime'], item[b'mtime']))
        elif utime_supports_fd:  # Python >= 3.3
            os.utime(path, None, ns=(item[b'mtime'], item[b'mtime']), follow_symlinks=False)
        elif not symlink:
            os.utime(path, (item[b'mtime'] / 10**9, item[b'mtime'] / 10**9))
        # Only available on OS X and FreeBSD
        if has_lchflags and b'bsdflags' in item:
            try:
                os.lchflags(path, item[b'bsdflags'])
            except OSError:
                pass

    def delete(self, stats):
        unpacker = msgpack.Unpacker(use_list=False)
        for items_id, data in zip(self.metadata[b'items'], self.repository.get_many(self.metadata[b'items'])):
            unpacker.feed(self.key.decrypt(items_id, data))
            self.cache.chunk_decref(items_id, stats)
            for item in unpacker:
                if b'chunks' in item:
                    for chunk_id, size, csize in item[b'chunks']:
                        self.cache.chunk_decref(chunk_id, stats)
        self.cache.chunk_decref(self.id, stats)
        del self.manifest.archives[self.name]

    def stat_attrs(self, st, path):
        item = {
            b'mode': st.st_mode,
            b'uid': st.st_uid, b'user': uid2user(st.st_uid),
            b'gid': st.st_gid, b'group': gid2group(st.st_gid),
            b'mtime': st_mtime_ns(st),
        }
        if self.numeric_owner:
            item[b'user'] = item[b'group'] = None
        xattrs = xattr.get_all(path, follow_symlinks=False)
        if xattrs:
            item[b'xattrs'] = StableDict(xattrs)
        if has_lchflags and st.st_flags:
            item[b'bsdflags'] = st.st_flags
        return item

    def process_item(self, path, st):
        item = {b'path': make_path_safe(path)}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_dev(self, path, st):
        item = {b'path': make_path_safe(path), b'rdev': st.st_rdev}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_symlink(self, path, st):
        source = os.readlink(path)
        item = {b'path': make_path_safe(path), b'source': source}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_file(self, path, st, cache):
        safe_path = make_path_safe(path)
        # Is it a hard link?
        if st.st_nlink > 1:
            source = self.hard_links.get((st.st_ino, st.st_dev))
            if source is not None:
                item = self.stat_attrs(st, path)
                item.update({b'path': safe_path, b'source': source})
                self.add_item(item)
                return
            else:
                self.hard_links[st.st_ino, st.st_dev] = safe_path
        path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
        ids = cache.file_known_and_unchanged(path_hash, st)
        chunks = None
        if ids is not None:
            # Make sure all ids are available
            for id_ in ids:
                if not cache.seen_chunk(id_):
                    break
            else:
                chunks = [cache.chunk_incref(id_, self.stats) for id_ in ids]
        # Only chunkify the file if needed
        if chunks is None:
            with open(path, 'rb') as fd:
                chunks = []
                for chunk in chunkify(fd, WINDOW_SIZE, CHUNK_MASK, CHUNK_MIN, self.key.chunk_seed):
                    chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
            cache.memorize_file(path_hash, st, [c[0] for c in chunks])
        item = {b'path': safe_path, b'chunks': chunks}
        item.update(self.stat_attrs(st, path))
        self.stats.nfiles += 1
        self.add_item(item)
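
    # Note on the fast path above: file_known_and_unchanged() returns the
    # chunk ids recorded for a file that looks unchanged, and the for/else
    # reuses them only when every id is still present in the chunk cache.
    # A single missing chunk breaks out of the loop, leaving chunks=None,
    # and the file is read and chunkified again.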

    @staticmethod
    def list_archives(repository, key, manifest, cache=None):
        for name, info in manifest.archives.items():
            yield Archive(repository, key, manifest, name, cache=cache)


class RobustUnpacker:
    """A restartable/robust version of the streaming msgpack unpacker
    """
    item_keys = [msgpack.packb(name) for name in ('path', 'mode', 'source', 'chunks', 'rdev', 'xattrs', 'user', 'group', 'uid', 'gid', 'mtime')]

    def __init__(self, validator):
        super(RobustUnpacker, self).__init__()
        self.validator = validator
        self._buffered_data = []
        self._resync = False
        self._unpacker = msgpack.Unpacker(object_hook=StableDict)

    def resync(self):
        self._buffered_data = []
        self._resync = True

    def feed(self, data):
        if self._resync:
            self._buffered_data.append(data)
        else:
            self._unpacker.feed(data)

    def __iter__(self):
        return self

    def __next__(self):
        if self._resync:
            data = b''.join(self._buffered_data)
            while self._resync:
                if not data:
                    raise StopIteration
                # Abort early if the data does not look like a serialized dict
                if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
                    data = data[1:]
                    continue
                # Make sure it looks like an item dict
                for pattern in self.item_keys:
                    if data[1:].startswith(pattern):
                        break
                else:
                    data = data[1:]
                    continue
                self._unpacker = msgpack.Unpacker(object_hook=StableDict)
                self._unpacker.feed(data)
                try:
                    item = next(self._unpacker)
                    if self.validator(item):
                        self._resync = False
                        return item
                # Ignore exceptions that might be raised when feeding
                # msgpack with invalid data
                except (TypeError, ValueError, StopIteration):
                    pass
                data = data[1:]
        else:
            return next(self._unpacker)
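
# Usage sketch (hypothetical data): after damage is detected in an item
# stream, resync() switches the unpacker into a buffering mode that scans
# byte by byte for the next plausible item dict before resuming normal
# unpacking.
#
#   unpacker = RobustUnpacker(lambda item: isinstance(item, dict) and b'path' in item)
#   unpacker.resync()
#   unpacker.feed(possibly_damaged_bytes)
#   for item in unpacker:
#       ...  # only items accepted by the validator are yielded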


class ArchiveChecker:

    def __init__(self):
        self.error_found = False
        self.possibly_superseded = set()
        self.tmpdir = tempfile.mkdtemp()

    def __del__(self):
        shutil.rmtree(self.tmpdir)

    def check(self, repository, repair=False):
        self.report_progress('Starting archive consistency check...')
        self.repair = repair
        self.repository = repository
        self.init_chunks()
        self.key = self.identify_key(repository)
        if Manifest.MANIFEST_ID not in self.chunks:
            self.manifest = self.rebuild_manifest()
        else:
            self.manifest, _ = Manifest.load(repository, key=self.key)
        self.rebuild_refcounts()
        self.verify_chunks()
        if not self.error_found:
            self.report_progress('Archive consistency check complete, no problems found.')
        return self.repair or not self.error_found

    def init_chunks(self):
        """Fetch a list of all object keys from repository
        """
        # Explicitly set the initial hash table capacity to avoid performance issues
        # due to hash table "resonance"
        capacity = int(len(self.repository) * 1.2)
        self.chunks = ChunkIndex.create(os.path.join(self.tmpdir, 'chunks').encode('utf-8'), capacity=capacity)
        marker = None
        while True:
            result = self.repository.list(limit=10000, marker=marker)
            if not result:
                break
            marker = result[-1]
            for id_ in result:
                self.chunks[id_] = (0, 0, 0)

    def report_progress(self, msg, error=False):
        if error:
            self.error_found = True
        print(msg, file=sys.stderr if error else sys.stdout)

    def identify_key(self, repository):
        cdata = repository.get(next(self.chunks.iteritems())[0])
        return key_factory(repository, cdata)

    def rebuild_manifest(self):
        """Rebuild the manifest object if it is missing

        Iterates through all objects in the repository looking for archive metadata blocks.
        """
        self.report_progress('Rebuilding missing manifest, this might take some time...', error=True)
        manifest = Manifest(self.key, self.repository)
        for chunk_id, _ in self.chunks.iteritems():
            cdata = self.repository.get(chunk_id)
            data = self.key.decrypt(chunk_id, cdata)
            # Some basic sanity checks of the payload before feeding it into msgpack
            if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
                continue
            if b'cmdline' not in data or b'\xa7version\x01' not in data:
                continue
            try:
                archive = msgpack.unpackb(data)
            except Exception:
                continue
            if isinstance(archive, dict) and b'items' in archive and b'cmdline' in archive:
                self.report_progress('Found archive ' + archive[b'name'].decode('utf-8'), error=True)
                manifest.archives[archive[b'name'].decode('utf-8')] = {b'id': chunk_id, b'time': archive[b'time']}
        self.report_progress('Manifest rebuild complete', error=True)
        return manifest

    def rebuild_refcounts(self):
        """Rebuild object reference counts by walking the metadata

        Missing and/or incorrect data is repaired when detected
        """
        # Exclude the manifest from chunks
        del self.chunks[Manifest.MANIFEST_ID]

        def mark_as_possibly_superseded(id_):
            if self.chunks.get(id_, (0,))[0] == 0:
                self.possibly_superseded.add(id_)

        def add_callback(chunk):
            id_ = self.key.id_hash(chunk)
            cdata = self.key.encrypt(chunk)
            add_reference(id_, len(chunk), len(cdata), cdata)
            return id_

        def add_reference(id_, size, csize, cdata=None):
            try:
                count, _, _ = self.chunks[id_]
                self.chunks[id_] = count + 1, size, csize
            except KeyError:
                assert cdata is not None
                self.chunks[id_] = 1, size, csize
                if self.repair:
                    self.repository.put(id_, cdata)

        def verify_file_chunks(item):
            """Verifies that all file chunks are present

            Missing file chunks will be replaced with new chunks of the same
            length containing all zeros.
            """
            offset = 0
            chunk_list = []
            for chunk_id, size, csize in item[b'chunks']:
                if chunk_id not in self.chunks:
                    # If a file chunk is missing, create an all empty replacement chunk
                    self.report_progress('{}: Missing file chunk detected (Byte {}-{})'.format(item[b'path'].decode('utf-8', 'surrogateescape'), offset, offset + size), error=True)
                    data = bytes(size)
                    chunk_id = self.key.id_hash(data)
                    cdata = self.key.encrypt(data)
                    csize = len(cdata)
                    add_reference(chunk_id, size, csize, cdata)
                else:
                    add_reference(chunk_id, size, csize)
                chunk_list.append((chunk_id, size, csize))
                offset += size
            item[b'chunks'] = chunk_list

        def robust_iterator(archive):
            """Iterates through all archive items

            Missing item chunks will be skipped and the msgpack stream will be restarted
            """
            unpacker = RobustUnpacker(lambda item: isinstance(item, dict) and b'path' in item)
            _state = 0

            def missing_chunk_detector(chunk_id):
                nonlocal _state
                if _state % 2 != int(chunk_id not in self.chunks):
                    _state += 1
                return _state

            for state, items in groupby(archive[b'items'], missing_chunk_detector):
                items = list(items)
                if state % 2:
                    self.report_progress('Archive metadata damage detected', error=True)
                    continue
                if state > 0:
                    unpacker.resync()
                for chunk_id, cdata in zip(items, repository.get_many(items)):
                    unpacker.feed(self.key.decrypt(chunk_id, cdata))
                    for item in unpacker:
                        yield item

        repository = cache_if_remote(self.repository)
        num_archives = len(self.manifest.archives)
        for i, (name, info) in enumerate(list(self.manifest.archives.items()), 1):
            self.report_progress('Analyzing archive {} ({}/{})'.format(name, i, num_archives))
            archive_id = info[b'id']
            if archive_id not in self.chunks:
                self.report_progress('Archive metadata block is missing', error=True)
                del self.manifest.archives[name]
                continue
            mark_as_possibly_superseded(archive_id)
            cdata = self.repository.get(archive_id)
            data = self.key.decrypt(archive_id, cdata)
            archive = StableDict(msgpack.unpackb(data))
            if archive[b'version'] != 1:
                raise Exception('Unknown archive metadata version')
            decode_dict(archive, (b'name', b'hostname', b'username', b'time'))  # fixme: argv
            items_buffer = ChunkBuffer(self.key)
            items_buffer.write_chunk = add_callback
            for item in robust_iterator(archive):
                if b'chunks' in item:
                    verify_file_chunks(item)
                items_buffer.add(item)
            items_buffer.flush(flush=True)
            for previous_item_id in archive[b'items']:
                mark_as_possibly_superseded(previous_item_id)
            archive[b'items'] = items_buffer.chunks
            data = msgpack.packb(archive, unicode_errors='surrogateescape')
            new_archive_id = self.key.id_hash(data)
            cdata = self.key.encrypt(data)
            add_reference(new_archive_id, len(data), len(cdata), cdata)
            info[b'id'] = new_archive_id

    def verify_chunks(self):
        unused = set()
        for id_, (count, size, csize) in self.chunks.iteritems():
            if count == 0:
                unused.add(id_)
        orphaned = unused - self.possibly_superseded
        if orphaned:
            self.report_progress('{} orphaned objects found'.format(len(orphaned)), error=True)
        if self.repair:
            for id_ in unused:
                self.repository.delete(id_)
            self.manifest.write()
            self.repository.commit()
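
# Usage sketch (assumes an open Repository instance): a plain check only
# reports problems; pass repair=True to rewrite damaged archive metadata
# and delete orphaned objects.
#
#   checker = ArchiveChecker()
#   ok = checker.check(repository, repair=False)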