archive.py

import msgpack
from attic.testsuite import AtticTestCase
from attic.archive import CacheChunkBuffer, RobustUnpacker
from attic.key import PlaintextKey, ZlibCompressor


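# MockCache stands in for the real chunk cache: it simply records each chunk
# in a dict so the tests can inspect exactly what CacheChunkBuffer stored.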
class MockCache:

    def __init__(self):
        self.objects = {}

    def add_chunk(self, id, data, stats=None):
        self.objects[id] = data
        return id, len(data), len(data)


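# The buffer is flushed after each item and once more with flush=True at the
# end, so the two items should land in two separate chunks that still decode
# back to the original data via msgpack.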
class ChunkBufferTestCase(AtticTestCase):

    def test(self):
        data = [{b'foo': 1}, {b'bar': 2}]
        cache = MockCache()
        key = PlaintextKey(ZlibCompressor())
        chunks = CacheChunkBuffer(cache, key, None)
        for d in data:
            chunks.add(d)
            chunks.flush()
        chunks.flush(flush=True)
        self.assert_equal(len(chunks.chunks), 2)
        unpacker = msgpack.Unpacker()
        for id in chunks.chunks:
            unpacker.feed(cache.objects[id])
        self.assert_equal(data, list(unpacker))


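# The tests below feed RobustUnpacker a msgpack stream in pieces, optionally
# damaged, and check which items survive.  _validator tells the unpacker
# which decoded values count as plausible items when it has to resync.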
class RobustUnpackerTestCase(AtticTestCase):

    def make_chunks(self, items):
        return b''.join(msgpack.packb({'path': item}) for item in items)

    def _validator(self, value):
        return isinstance(value, dict) and value.get(b'path') in (b'foo', b'bar', b'boo', b'baz')

    def process(self, input):
        unpacker = RobustUnpacker(validator=self._validator)
        result = []
        for should_sync, chunks in input:
            if should_sync:
                unpacker.resync()
            for data in chunks:
                unpacker.feed(data)
                for item in unpacker:
                    result.append(item)
        return result

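    # Without a resync the garbage bytes are not skipped: msgpack decodes
    # each byte of b'garbage' as a positive fixint (103, 97, 114, ...) before
    # the stream lines up with the next packed item again.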
    def test_extra_garbage_no_sync(self):
        chunks = [(False, [self.make_chunks([b'foo', b'bar'])]),
                  (False, [b'garbage'] + [self.make_chunks([b'boo', b'baz'])])]
        result = self.process(chunks)
        self.assert_equal(result, [
            {b'path': b'foo'}, {b'path': b'bar'},
            103, 97, 114, 98, 97, 103, 101,
            {b'path': b'boo'},
            {b'path': b'baz'}])

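    # Helper: cut a byte string into fixed-size pieces to simulate a chunked
    # stream.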
    def split(self, left, length):
        parts = []
        while left:
            parts.append(left[:length])
            left = left[length:]
        return parts

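    # An undamaged stream fed two bytes at a time should decode completely.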
    def test_correct_stream(self):
        chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 2)
        input = [(False, chunks)]
        result = self.process(input)
        self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'bar'}, {b'path': b'boo'}, {b'path': b'baz'}])

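    # One chunk is dropped from the stream (chunks[3] is never fed), so the
    # item it belonged to (b'bar') is lost; after resync() the remaining
    # items should still be recovered.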
    def test_missing_chunk(self):
        chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 4)
        input = [(False, chunks[:3]), (True, chunks[4:])]
        result = self.process(input)
        self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'boo'}, {b'path': b'baz'}])

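    # The stream is interrupted mid-item and garbage is injected before it
    # resumes; resync() should skip the garbage and the partially received
    # item (b'bar') and pick up at the next valid one.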
    def test_corrupt_chunk(self):
        chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 4)
        input = [(False, chunks[:3]), (True, [b'gar', b'bage'] + chunks[3:])]
        result = self.process(input)
        self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'boo'}, {b'path': b'baz'}])