# archive.py

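"""Archive handling for a deduplicating, encrypting backup store.

An Archive buffers msgpack-encoded item metadata, cuts it into chunks with
the same rolling-hash chunker used for file data, and stores both through an
encrypting key and a chunk cache.  It also provides extraction, verification,
per-archive statistics and deletion, driven by asynchronous store.get()
callbacks.
"""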
from __future__ import with_statement
from datetime import datetime, timedelta
from getpass import getuser
import msgpack
import os
import socket
import stat
import sys
from cStringIO import StringIO
from xattr import xattr, XATTR_NOFOLLOW

from ._speedups import chunkify
from .helpers import uid2user, user2uid, gid2group, group2gid, \
    Counter, encode_filename, Statistics

ITEMS_BUFFER = 1024 * 1024
CHUNK_SIZE = 64 * 1024
WINDOW_SIZE = 4096

have_lchmod = hasattr(os, 'lchmod')
linux = sys.platform == 'linux2'


class Archive(object):

    class DoesNotExist(Exception):
        pass

    def __init__(self, store, key, manifest, name=None, cache=None):
        self.key = key
        self.store = store
        self.cache = cache
        self.manifest = manifest
        self.items = StringIO()
        self.items_ids = []
        self.hard_links = {}
        self.stats = Statistics()
        if name:
            try:
                info = self.manifest.archives[name]
            except KeyError:
                raise Archive.DoesNotExist
            self.load(info['id'])

    def load(self, id):
        self.id = id
        data = self.key.decrypt(self.id, self.store.get(self.id))
        self.metadata = msgpack.unpackb(data)
        if self.metadata['version'] != 1:
            raise Exception('Unknown archive metadata version')
        self.name = self.metadata['name']

    @property
    def ts(self):
        """Timestamp of archive creation in UTC"""
        t, f = self.metadata['time'].split('.', 1)
        return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S') + timedelta(seconds=float('.' + f))

    def __repr__(self):
        return 'Archive(%r)' % self.name
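
    # iter_items() fetches the item-metadata chunks asynchronously: each
    # store.get() call hands the encrypted chunk to cb(), which decrypts it,
    # feeds it to the msgpack Unpacker and invokes the caller's callback once
    # per decoded item. The Counter caps the number of in-flight requests
    # (see the flush_rpc(counter, 10) call below).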
    def iter_items(self, callback):
        unpacker = msgpack.Unpacker()
        counter = Counter(0)
        def cb(chunk, error, id):
            if error:
                raise error
            assert not error
            counter.dec()
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                callback(item)
        for id in self.metadata['items']:
            # Limit the number of concurrent items requests to 10
            self.store.flush_rpc(counter, 10)
            counter.inc()
            self.store.get(id, callback=cb, callback_data=id)

    def add_item(self, item):
        self.items.write(msgpack.packb(item))
        if self.items.tell() > ITEMS_BUFFER:
            self.flush_items()
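
    # flush_items() runs the buffered item stream through the rolling-hash
    # chunker and stores every chunk except the last one; the trailing
    # partial chunk is written back into the buffer so it can keep growing,
    # unless flush=True (used by save()) forces it out as well.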
    def flush_items(self, flush=False):
        if self.items.tell() == 0:
            return
        self.items.seek(0)
        chunks = list(str(s) for s in chunkify(self.items, CHUNK_SIZE, WINDOW_SIZE, self.key.chunk_seed))
        self.items.seek(0)
        self.items.truncate()
        for chunk in chunks[:-1]:
            id, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats)
            self.items_ids.append(id)
        if flush or len(chunks) == 1:
            id, _, _ = self.cache.add_chunk(self.key.id_hash(chunks[-1]), chunks[-1], self.stats)
            self.items_ids.append(id)
        else:
            self.items.write(chunks[-1])

    def save(self, name, cache):
        if name in self.manifest.archives:
            raise ValueError('Archive %s already exists' % name)
        self.flush_items(flush=True)
        metadata = {
            'version': 1,
            'name': name,
            'items': self.items_ids,
            'cmdline': sys.argv,
            'hostname': socket.gethostname(),
            'username': getuser(),
            'time': datetime.utcnow().isoformat(),
        }
        data = msgpack.packb(metadata)
        self.id = self.key.id_hash(data)
        cache.add_chunk(self.id, data, self.stats)
        self.manifest.archives[name] = {'id': self.id, 'time': metadata['time']}
        self.manifest.write()
        self.store.commit()
        cache.commit()

    def calc_stats(self, cache):
        # This function is a bit evil since it abuses the cache to calculate
        # the stats. The cache transaction must be rolled back afterwards
        def cb(chunk, error, id):
            assert not error
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                try:
                    for id, size, csize in item['chunks']:
                        count, _, _ = self.cache.chunks[id]
                        stats.update(size, csize, count == 1)
                        stats.nfiles += 1
                        self.cache.chunks[id] = count - 1, size, csize
                except KeyError:
                    pass
        unpacker = msgpack.Unpacker()
        cache.begin_txn()
        stats = Statistics()
        for id in self.metadata['items']:
            self.store.get(id, callback=cb, callback_data=id)
            count, size, csize = self.cache.chunks[id]
            stats.update(size, csize, count == 1)
            self.cache.chunks[id] = count - 1, size, csize
        self.store.flush_rpc()
        cache.rollback()
        return stats
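
    # extract_item() recreates one archive entry on disk, dispatching on the
    # stored mode: directories, FIFOs and symlinks are created directly,
    # while regular files are either hard-linked to an already extracted
    # path or reassembled chunk by chunk via asynchronous store.get()
    # callbacks.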
    def extract_item(self, item, dest=None, start_cb=None, restore_attrs=True):
        dest = dest or os.getcwdu()
        dir_stat_queue = []
        assert item['path'][0] not in ('/', '\\', ':')
        path = os.path.join(dest, encode_filename(item['path']))
        mode = item['mode']
        if stat.S_ISDIR(mode):
            if not os.path.exists(path):
                os.makedirs(path)
            if restore_attrs:
                self.restore_attrs(path, item)
        elif stat.S_ISFIFO(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            os.mkfifo(path)
            self.restore_attrs(path, item)
        elif stat.S_ISLNK(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            source = item['source']
            if os.path.exists(path):
                os.unlink(path)
            os.symlink(source, path)
            self.restore_attrs(path, item, symlink=True)
        elif stat.S_ISREG(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            # Hard link?
            if 'source' in item:
                source = os.path.join(dest, item['source'])
                if os.path.exists(path):
                    os.unlink(path)
                os.link(source, path)
            else:
                def extract_cb(chunk, error, (id, i)):
                    if i == 0:
                        state['fd'] = open(path, 'wb')
                        start_cb(item)
                    assert not error
                    data = self.key.decrypt(id, chunk)
                    state['fd'].write(data)
                    if i == n - 1:
                        state['fd'].close()
                        self.restore_attrs(path, item)
                state = {}
                n = len(item['chunks'])
                ## 0 chunks indicates an empty (0 bytes) file
                if n == 0:
                    open(path, 'wb').close()
                    start_cb(item)
                    self.restore_attrs(path, item)
                else:
                    for i, (id, size, csize) in enumerate(item['chunks']):
                        self.store.get(id, callback=extract_cb, callback_data=(id, i))
        else:
            raise Exception('Unknown archive item type %r' % item['mode'])

    def restore_attrs(self, path, item, symlink=False):
        xattrs = item.get('xattrs')
        if xattrs:
            xa = xattr(path, XATTR_NOFOLLOW)
            for k, v in xattrs.items():
                try:
                    xa.set(k, v)
                except (IOError, KeyError):
                    pass
        if have_lchmod:
            os.lchmod(path, item['mode'])
        elif not symlink:
            os.chmod(path, item['mode'])
        uid = user2uid(item['user']) or item['uid']
        gid = group2gid(item['group']) or item['gid']
        try:
            os.lchown(path, uid, gid)
        except OSError:
            pass
        if not symlink:
            # FIXME: We should really call futimes here (c extension required)
            os.utime(path, (item['mtime'], item['mtime']))

    def verify_file(self, item, start, result):
        def verify_chunk(chunk, error, (id, i)):
            if error:
                if not state:
                    result(item, False)
                    state[True] = True
                return
            if i == 0:
                start(item)
            data = self.key.decrypt(id, chunk)
            if i == n - 1:
                result(item, True)
        state = {}
        n = len(item['chunks'])
        if n == 0:
            start(item)
            result(item, True)
        else:
            for i, (id, size, csize) in enumerate(item['chunks']):
                self.store.get(id, callback=verify_chunk, callback_data=(id, i))

    def delete(self, cache):
        def callback(chunk, error, id):
            assert not error
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                try:
                    for chunk_id, size, csize in item['chunks']:
                        self.cache.chunk_decref(chunk_id)
                except KeyError:
                    pass
            self.cache.chunk_decref(id)
        unpacker = msgpack.Unpacker()
        for id in self.metadata['items']:
            self.store.get(id, callback=callback, callback_data=id)
        self.store.flush_rpc()
        self.cache.chunk_decref(self.id)
        del self.manifest.archives[self.name]
        self.manifest.write()
        self.store.commit()
        cache.commit()

    def stat_attrs(self, st, path):
        item = {
            'mode': st.st_mode,
            'uid': st.st_uid, 'user': uid2user(st.st_uid),
            'gid': st.st_gid, 'group': gid2group(st.st_gid),
            'mtime': st.st_mtime,
        }
        try:
            xa = xattr(path, XATTR_NOFOLLOW)
            xattrs = {}
            for key in xa:
                # Only store the user namespace on Linux
                if linux and not key.startswith('user'):
                    continue
                xattrs[key] = xa[key]
            if xattrs:
                item['xattrs'] = xattrs
        except IOError:
            pass
        return item

    def process_dir(self, path, st):
        item = {'path': path.lstrip('/\\:')}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_fifo(self, path, st):
        item = {'path': path.lstrip('/\\:')}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_symlink(self, path, st):
        source = os.readlink(path)
        item = {'path': path.lstrip('/\\:'), 'source': source}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)
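
    # process_file() is where deduplication happens: hard links are stored
    # as references to the first occurrence, unchanged files (according to
    # the cache's file_known_and_unchanged check) just take new references
    # on their existing chunks, and only new or modified files are
    # re-chunkified and stored.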
    def process_file(self, path, st, cache):
        safe_path = path.lstrip('/\\:')
        # Is it a hard link?
        if st.st_nlink > 1:
            source = self.hard_links.get((st.st_ino, st.st_dev))
            if (st.st_ino, st.st_dev) in self.hard_links:
                item = self.stat_attrs(st, path)
                item.update({'path': safe_path, 'source': source})
                self.add_item(item)
                return
            else:
                self.hard_links[st.st_ino, st.st_dev] = safe_path
        path_hash = self.key.id_hash(path)
        ids = cache.file_known_and_unchanged(path_hash, st)
        chunks = None
        if ids is not None:
            # Make sure all ids are available
            for id in ids:
                if not cache.seen_chunk(id):
                    break
            else:
                chunks = [cache.chunk_incref(id, self.stats) for id in ids]
        # Only chunkify the file if needed
        if chunks is None:
            with open(path, 'rb') as fd:
                chunks = []
                for chunk in chunkify(fd, CHUNK_SIZE, WINDOW_SIZE,
                                      self.key.chunk_seed):
                    chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
            ids = [id for id, _, _ in chunks]
            cache.memorize_file(path_hash, st, ids)
        item = {'path': safe_path, 'chunks': chunks}
        item.update(self.stat_attrs(st, path))
        self.stats.nfiles += 1
        self.add_item(item)

    @staticmethod
    def list_archives(store, key, manifest, cache=None):
        for name, info in manifest.archives.items():
            archive = Archive(store, key, manifest, cache=cache)
            archive.load(info['id'])
            yield archive
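

# A minimal usage sketch, based only on the calls visible in this module; it
# is not part of the original file.  `open_store`, `Key`, `Manifest` and
# `Cache` are assumptions standing in for whatever the surrounding project
# actually provides to build those objects.
#
#     store = open_store('/path/to/repository')   # hypothetical helper
#     key = Key(store)                             # assumed constructor
#     manifest = Manifest(store, key)              # assumed constructor
#     cache = Cache(store, key, manifest)          # assumed constructor
#
#     archive = Archive(store, key, manifest, cache=cache)
#     archive.process_file('/etc/hosts', os.lstat('/etc/hosts'), cache)
#     archive.save('my-backup', cache)
#
#     for existing in Archive.list_archives(store, key, manifest, cache=cache):
#         print existing.name, existing.ts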