archiver.py

import os
import hashlib
import logging
import zlib
import cPickle
import argparse

from chunkifier import chunkify
from cache import Cache, NS_ARCHIVES, NS_CHUNKS
from bandstore import BandStore
from helpers import location_validator

CHUNK_SIZE = 55001
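

# An archive is serialized as a zlib-compressed cPickle dict
# {'name': ..., 'items': ..., 'chunks': ...} and stored under NS_ARCHIVES,
# keyed by the SHA-256 digest of the compressed blob.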
class Archive(object):

    def __init__(self, store, cache, name=None):
        self.store = store
        self.cache = cache
        self.items = []
        self.chunks = []
        self.chunk_idx = {}
        if name:
            self.open(name)
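
    # Load a saved archive by name and verify its integrity: the store key
    # must equal the SHA-256 digest of the blob it points at.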
    def open(self, name):
        id = self.cache.archives[name]
        data = self.store.get(NS_ARCHIVES, id)
        if hashlib.sha256(data).digest() != id:
            raise Exception('Archive hash did not match')
        archive = cPickle.loads(zlib.decompress(data))
        self.items = archive['items']
        self.name = archive['name']
        self.chunks = archive['chunks']
        for i, (id, csize, osize) in enumerate(archive['chunks']):
            self.chunk_idx[i] = id
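
    # Serialize this archive, key it by the digest of the compressed pickle,
    # and commit it to the store.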
    def save(self, name):
        archive = {'name': name, 'items': self.items, 'chunks': self.chunks}
        data = zlib.compress(cPickle.dumps(archive))
        self.id = hashlib.sha256(data).digest()
        self.store.put(NS_ARCHIVES, self.id, data)
        self.store.commit()
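
    # Record a chunk reference in this archive, deduplicating within the
    # archive: a chunk id that was already seen just returns its existing
    # index into self.chunks.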
    def add_chunk(self, id, csize, osize):
        try:
            return self.chunk_idx[id]
        except KeyError:
            idx = len(self.chunks)
            self.chunks.append((id, csize, osize))
            self.chunk_idx[id] = idx
            return idx
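
    # Compute size statistics for this archive:
    #   osize - original (uncompressed) size of all files
    #   csize - compressed size of the chunks the archive references
    #   usize - compressed size of chunks referenced by this archive alone,
    #           i.e. every refcount recorded in cache.chunkmap comes from here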
    def stats(self, cache):
        total_osize = 0
        total_csize = 0
        total_usize = 0
        chunk_count = {}
        for item in self.items:
            if item['type'] == 'FILE':
                total_osize += item['size']
                for idx in item['chunks']:
                    id = self.chunk_idx[idx]
                    chunk_count.setdefault(id, 0)
                    chunk_count[id] += 1
        for id, c in chunk_count.items():
            count, csize, osize = cache.chunkmap[id]
            total_csize += csize
            if c == count:
                total_usize += csize
        return dict(osize=total_osize, csize=total_csize, usize=total_usize)

    def list(self):
        for item in self.items:
            print item['path']
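
    # Restore items to `dest`. Each stored chunk is a 32-byte SHA-256 of the
    # compressed data followed by the compressed data itself; the chunk id is
    # the SHA-256 of the plaintext, so both digests are checked on the way out.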
    def extract(self, dest=None):
        dest = dest or os.getcwdu()
        for item in self.items:
            assert item['path'][0] not in ('/', '\\', ':')
            path = os.path.join(dest, item['path'])
            logging.info(path)
            if item['type'] == 'DIR':
                if not os.path.exists(path):
                    os.makedirs(path)
            if item['type'] == 'FILE':
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                with open(path, 'wb') as fd:
                    for chunk in item['chunks']:
                        id = self.chunk_idx[chunk]
                        data = self.store.get(NS_CHUNKS, id)
                        cid = data[:32]
                        data = data[32:]
                        if hashlib.sha256(data).digest() != cid:
                            raise Exception('Invalid chunk checksum')
                        plain = zlib.decompress(data)
                        if hashlib.sha256(plain).digest() != id:
                            raise Exception('Invalid chunk checksum')
                        fd.write(plain)
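
    # Check every file's chunks against both digests without writing
    # anything; logs one OK/ERROR line per file.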
    def verify(self):
        for item in self.items:
            if item['type'] == 'FILE':
                for chunk in item['chunks']:
                    id = self.chunk_idx[chunk]
                    data = self.store.get(NS_CHUNKS, id)
                    cid = data[:32]
                    data = data[32:]
                    if (hashlib.sha256(data).digest() != cid or
                        hashlib.sha256(zlib.decompress(data)).digest() != id):
                        logging.error('%s ... ERROR', item['path'])
                        break
                else:
                    logging.info('%s ... OK', item['path'])
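
    # Remove the archive record and drop one reference from each chunk it
    # holds via cache.chunk_decref(), then commit and update the cache.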
    def delete(self, cache):
        self.store.delete(NS_ARCHIVES, self.cache.archives[self.name])
        for item in self.items:
            if item['type'] == 'FILE':
                for c in item['chunks']:
                    id = self.chunk_idx[c]
                    cache.chunk_decref(id)
        self.store.commit()
        del cache.archives[self.name]
        cache.save()
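
    # Walk the given paths and archive every directory and readable file,
    # then save the archive and register it in the cache.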
    def create(self, name, paths, cache):
        if name in cache.archives:
            raise NameError('Archive already exists')
        for path in paths:
            for root, dirs, files in os.walk(path):
                for d in dirs:
                    p = os.path.join(root, d)
                    self.items.append(self.process_dir(p, cache))
                for f in files:
                    p = os.path.join(root, f)
                    entry = self.process_file(p, cache)
                    if entry:
                        self.items.append(entry)
        self.save(name)
        cache.archives[name] = self.id
        cache.save()

    def process_dir(self, path, cache):
        path = path.lstrip('/\\:')
        logging.info(path)
        return {'type': 'DIR', 'path': path}
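
    # Split a file into chunks via chunkify(); cache.add_chunk() is expected
    # to store new chunk data (or bump the refcount of an existing chunk) and
    # return the (id, csize, osize) tuple that add_chunk() records here.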
    def process_file(self, path, cache):
        try:
            fd = open(path, 'rb')
        except IOError as e:
            logging.error(e)
            return
        with fd:
            path = path.lstrip('/\\:')
            logging.info(path)
            chunks = []
            size = 0
            for chunk in chunkify(fd, CHUNK_SIZE, 30):
                size += len(chunk)
                chunks.append(self.add_chunk(*cache.add_chunk(chunk)))
            return {'type': 'FILE', 'path': path, 'chunks': chunks, 'size': size}
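

# Command-line front-end: maps the create/extract/delete/list/verify/info
# subcommands onto Archive operations.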
class Archiver(object):

    def pretty_size(self, v):
        if v > 1024 * 1024 * 1024:
            return '%.2f GB' % (v / 1024. / 1024. / 1024.)
        elif v > 1024 * 1024:
            return '%.2f MB' % (v / 1024. / 1024.)
        elif v > 1024:
            return '%.2f kB' % (v / 1024.)
        else:
            return str(v)

    def open_store(self, location):
        store = BandStore(location.path)
        cache = Cache(store)
        return store, cache

    def do_create(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache)
        archive.create(args.archive.archive, args.paths, cache)

    def do_extract(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.extract(args.dest)

    def do_delete(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.delete(cache)

    def do_list(self, args):
        store, cache = self.open_store(args.src)
        if args.src.archive:
            archive = Archive(store, cache, args.src.archive)
            archive.list()
        else:
            for archive in sorted(cache.archives):
                print archive

    def do_verify(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.verify()

    def do_info(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        stats = archive.stats(cache)
        print 'Original size:', self.pretty_size(stats['osize'])
        print 'Compressed size:', self.pretty_size(stats['csize'])
        print 'Unique data:', self.pretty_size(stats['usize'])
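
    # Build the argument parser and dispatch to the do_* handler chosen by
    # the subcommand, e.g. (metavars as defined below):
    #   archiver.py create ARCHIVE PATH [PATH ...]
    #   archiver.py extract ARCHIVE [DEST]
    #   archiver.py info ARCHIVE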
    def run(self):
        parser = argparse.ArgumentParser(description='Dedupestore')
        parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                            default=False,
                            help='Verbose output')
        subparsers = parser.add_subparsers(title='Available subcommands')

        subparser = subparsers.add_parser('create')
        subparser.set_defaults(func=self.do_create)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to create')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='Paths to add to archive')

        subparser = subparsers.add_parser('extract')
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to extract')
        subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
                               help='Where to extract files')

        subparser = subparsers.add_parser('delete')
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to delete')

        subparser = subparsers.add_parser('list')
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('src', metavar='SRC', type=location_validator(),
                               help='Store/Archive to list contents of')

        subparser = subparsers.add_parser('verify')
        subparser.set_defaults(func=self.do_verify)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to verify integrity of')

        subparser = subparsers.add_parser('info')
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to display information about')

        args = parser.parse_args()
        if args.verbose:
            logging.basicConfig(level=logging.INFO, format='%(message)s')
        else:
            logging.basicConfig(level=logging.WARNING, format='%(message)s')
        args.func(args)


def main():
    archiver = Archiver()
    archiver.run()


if __name__ == '__main__':
    main()