# archive.py — attic test suite for CacheChunkBuffer and RobustUnpacker.
import msgpack

from attic.archive import CacheChunkBuffer, RobustUnpacker
from attic.key import COMPR_DEFAULT, PlaintextKey
from attic.testsuite import AtticTestCase
  5. class MockCache:
  6. def __init__(self):
  7. self.objects = {}
  8. def add_chunk(self, id, data, stats=None):
  9. self.objects[id] = data
  10. return id, len(data), len(data)
  11. class ChunkBufferTestCase(AtticTestCase):
  12. class MockArgs(object):
  13. repository = None
  14. compression = COMPR_DEFAULT
  15. mac = None
  16. def test(self):
  17. data = [{b'foo': 1}, {b'bar': 2}]
  18. cache = MockCache()
  19. key = PlaintextKey.create(None, self.MockArgs())
  20. chunks = CacheChunkBuffer(cache, key, None)
  21. for d in data:
  22. chunks.add(d)
  23. chunks.flush()
  24. chunks.flush(flush=True)
  25. self.assert_equal(len(chunks.chunks), 2)
  26. unpacker = msgpack.Unpacker()
  27. for id in chunks.chunks:
  28. unpacker.feed(cache.objects[id])
  29. self.assert_equal(data, list(unpacker))
  30. class RobustUnpackerTestCase(AtticTestCase):
  31. def make_chunks(self, items):
  32. return b''.join(msgpack.packb({'path': item}) for item in items)
  33. def _validator(self, value):
  34. return isinstance(value, dict) and value.get(b'path') in (b'foo', b'bar', b'boo', b'baz')
  35. def process(self, input):
  36. unpacker = RobustUnpacker(validator=self._validator)
  37. result = []
  38. for should_sync, chunks in input:
  39. if should_sync:
  40. unpacker.resync()
  41. for data in chunks:
  42. unpacker.feed(data)
  43. for item in unpacker:
  44. result.append(item)
  45. return result
  46. def test_extra_garbage_no_sync(self):
  47. chunks = [(False, [self.make_chunks([b'foo', b'bar'])]),
  48. (False, [b'garbage'] + [self.make_chunks([b'boo', b'baz'])])]
  49. result = self.process(chunks)
  50. self.assert_equal(result, [
  51. {b'path': b'foo'}, {b'path': b'bar'},
  52. 103, 97, 114, 98, 97, 103, 101,
  53. {b'path': b'boo'},
  54. {b'path': b'baz'}])
  55. def split(self, left, length):
  56. parts = []
  57. while left:
  58. parts.append(left[:length])
  59. left = left[length:]
  60. return parts
  61. def test_correct_stream(self):
  62. chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 2)
  63. input = [(False, chunks)]
  64. result = self.process(input)
  65. self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'bar'}, {b'path': b'boo'}, {b'path': b'baz'}])
  66. def test_missing_chunk(self):
  67. chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 4)
  68. input = [(False, chunks[:3]), (True, chunks[4:])]
  69. result = self.process(input)
  70. self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'boo'}, {b'path': b'baz'}])
  71. def test_corrupt_chunk(self):
  72. chunks = self.split(self.make_chunks([b'foo', b'bar', b'boo', b'baz']), 4)
  73. input = [(False, chunks[:3]), (True, [b'gar', b'bage'] + chunks[3:])]
  74. result = self.process(input)
  75. self.assert_equal(result, [{b'path': b'foo'}, {b'path': b'boo'}, {b'path': b'baz'}])