archive.py

from __future__ import with_statement
from datetime import datetime, timedelta
from getpass import getuser
import msgpack
import os
import socket
import stat
import sys
import time
from cStringIO import StringIO
from xattr import xattr, XATTR_NOFOLLOW

from ._speedups import chunkify
from .helpers import uid2user, user2uid, gid2group, group2gid, \
    Counter, encode_filename, Statistics

ITEMS_BUFFER = 1024 * 1024
CHUNK_SIZE = 64 * 1024
WINDOW_SIZE = 4096

have_lchmod = hasattr(os, 'lchmod')
linux = sys.platform == 'linux2'


class Archive(object):

    class DoesNotExist(Exception):
        pass

    class AlreadyExists(Exception):
        pass

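    # Either create a new (not yet saved) archive or load an existing one
    # from the store. When creating, a free "<name>.checkpoint[.N]" name is
    # reserved for periodic checkpoint archives.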
    def __init__(self, store, key, manifest, name, cache=None, create=False, checkpoint_interval=300):
        self.key = key
        self.store = store
        self.cache = cache
        self.manifest = manifest
        self.items = StringIO()
        self.items_ids = []
        self.hard_links = {}
        self.stats = Statistics()
        self.name = name
        self.checkpoint_interval = checkpoint_interval
        if create:
            if name in manifest.archives:
                raise self.AlreadyExists
            self.last_checkpoint = time.time()
            i = 0
            while True:
                self.checkpoint_name = '%s.checkpoint%s' % (name, i and ('.%d' % i) or '')
                if self.checkpoint_name not in manifest.archives:
                    break
                i += 1
        else:
            try:
                info = self.manifest.archives[name]
            except KeyError:
                raise self.DoesNotExist
            self.load(info['id'])

    def load(self, id):
        self.id = id
        data = self.key.decrypt(self.id, self.store.get(self.id))
        self.metadata = msgpack.unpackb(data)
        if self.metadata['version'] != 1:
            raise Exception('Unknown archive metadata version')
        self.name = self.metadata['name']

    @property
    def ts(self):
        """Timestamp of archive creation in UTC"""
        t, f = self.metadata['time'].split('.', 1)
        return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S') + timedelta(seconds=float('.' + f))

    def __repr__(self):
        return 'Archive(%r)' % self.name

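    # Fetch the archive's item metadata chunks from the store, decrypt and
    # msgpack-decode them, and invoke `callback` once per item.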
    def iter_items(self, callback):
        unpacker = msgpack.Unpacker()
        counter = Counter(0)
        def cb(chunk, error, id):
            if error:
                raise error
            assert not error
            counter.dec()
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                callback(item)
        for id in self.metadata['items']:
            # Limit the number of concurrent items requests to 10
            self.store.flush_rpc(counter, 10)
            counter.inc()
            self.store.get(id, callback=cb, callback_data=id)

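    # Buffer one packed item; write a checkpoint archive when the checkpoint
    # interval has elapsed and flush the buffer once it exceeds ITEMS_BUFFER.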
    def add_item(self, item):
        self.items.write(msgpack.packb(item))
        now = time.time()
        if now - self.last_checkpoint > self.checkpoint_interval:
            self.last_checkpoint = now
            self.write_checkpoint()
        if self.items.tell() > ITEMS_BUFFER:
            self.flush_items()

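    # Chunk the buffered item stream and store the resulting chunks. Unless
    # `flush` is set, the last (possibly short) chunk is written back into
    # the buffer so it can keep growing.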
    def flush_items(self, flush=False):
        if self.items.tell() == 0:
            return
        self.items.seek(0)
        chunks = list(str(s) for s in chunkify(self.items, CHUNK_SIZE, WINDOW_SIZE, self.key.chunk_seed))
        self.items.seek(0)
        self.items.truncate()
        for chunk in chunks[:-1]:
            id, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats)
            self.items_ids.append(id)
        if flush or len(chunks) == 1:
            id, _, _ = self.cache.add_chunk(self.key.id_hash(chunks[-1]), chunks[-1], self.stats)
            self.items_ids.append(id)
        else:
            self.items.write(chunks[-1])

    def write_checkpoint(self):
        self.save(self.checkpoint_name)
        del self.manifest.archives[self.checkpoint_name]
        self.cache.chunk_decref(self.id)

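    # Flush the remaining buffered items, then store the archive metadata,
    # register the archive in the manifest and commit store and cache.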
    def save(self, name=None):
        name = name or self.name
        if name in self.manifest.archives:
            raise self.AlreadyExists(name)
        self.flush_items(flush=True)
        metadata = {
            'version': 1,
            'name': name,
            'items': self.items_ids,
            'cmdline': sys.argv,
            'hostname': socket.gethostname(),
            'username': getuser(),
            'time': datetime.utcnow().isoformat(),
        }
        data = msgpack.packb(metadata)
        self.id = self.key.id_hash(data)
        self.cache.add_chunk(self.id, data, self.stats)
        self.manifest.archives[name] = {'id': self.id, 'time': metadata['time']}
        self.manifest.write()
        self.store.commit()
        self.cache.commit()

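    # Compute size/deduplication statistics for this archive. Chunk refcounts
    # are decremented inside a cache transaction that is rolled back at the
    # end, so chunks referenced only by this archive eventually reach a count
    # of 1 and are attributed as unique data.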
    def calc_stats(self, cache):
        # This function is a bit evil since it abuses the cache to calculate
        # the stats. The cache transaction must be rolled back afterwards
        def cb(chunk, error, id):
            assert not error
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                try:
                    for id, size, csize in item['chunks']:
                        count, _, _ = self.cache.chunks[id]
                        stats.update(size, csize, count == 1)
                        stats.nfiles += 1
                        self.cache.chunks[id] = count - 1, size, csize
                except KeyError:
                    pass
        unpacker = msgpack.Unpacker()
        cache.begin_txn()
        stats = Statistics()
        for id in self.metadata['items']:
            self.store.get(id, callback=cb, callback_data=id)
            count, size, csize = self.cache.chunks[id]
            stats.update(size, csize, count == 1)
            self.cache.chunks[id] = count - 1, size, csize
        self.store.flush_rpc()
        cache.rollback()
        return stats

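    # Recreate a single archived item (directory, fifo, symlink, hard link or
    # regular file) below `dest`; regular file contents are written chunk by
    # chunk from asynchronous store callbacks.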
    def extract_item(self, item, dest=None, start_cb=None, restore_attrs=True):
        dest = dest or os.getcwdu()
        dir_stat_queue = []
        assert item['path'][0] not in ('/', '\\', ':')
        path = os.path.join(dest, encode_filename(item['path']))
        mode = item['mode']
        if stat.S_ISDIR(mode):
            if not os.path.exists(path):
                os.makedirs(path)
            if restore_attrs:
                self.restore_attrs(path, item)
        elif stat.S_ISFIFO(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            os.mkfifo(path)
            self.restore_attrs(path, item)
        elif stat.S_ISLNK(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            source = item['source']
            if os.path.exists(path):
                os.unlink(path)
            os.symlink(source, path)
            self.restore_attrs(path, item, symlink=True)
        elif stat.S_ISREG(mode):
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            # Hard link?
            if 'source' in item:
                def link_cb(_, __, item):
                    source = os.path.join(dest, item['source'])
                    if os.path.exists(path):
                        os.unlink(path)
                    os.link(source, path)
                self.store.add_callback(link_cb, item)
            else:
                def extract_cb(chunk, error, (id, i)):
                    if i == 0:
                        state['fd'] = open(path, 'wb')
                        start_cb(item)
                    assert not error
                    data = self.key.decrypt(id, chunk)
                    state['fd'].write(data)
                    if i == n - 1:
                        state['fd'].close()
                        self.restore_attrs(path, item)
                state = {}
                n = len(item['chunks'])
                ## 0 chunks indicates an empty (0 bytes) file
                if n == 0:
                    open(path, 'wb').close()
                    start_cb(item)
                    self.restore_attrs(path, item)
                else:
                    for i, (id, size, csize) in enumerate(item['chunks']):
                        self.store.get(id, callback=extract_cb, callback_data=(id, i))
        else:
            raise Exception('Unknown archive item type %r' % item['mode'])

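    # Re-apply extended attributes, permissions, ownership and mtime to an
    # extracted path; xattr and ownership errors are silently ignored.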
    def restore_attrs(self, path, item, symlink=False):
        xattrs = item.get('xattrs')
        if xattrs:
            xa = xattr(path, XATTR_NOFOLLOW)
            for k, v in xattrs.items():
                try:
                    xa.set(k, v)
                except (IOError, KeyError):
                    pass
        if have_lchmod:
            os.lchmod(path, item['mode'])
        elif not symlink:
            os.chmod(path, item['mode'])
        uid = user2uid(item['user']) or item['uid']
        gid = group2gid(item['group']) or item['gid']
        try:
            os.lchown(path, uid, gid)
        except OSError:
            pass
        if not symlink:
            # FIXME: We should really call futimes here (c extension required)
            os.utime(path, (item['mtime'], item['mtime']))

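    # Check that every chunk of a file item can be fetched and decrypted;
    # `result` is called with False on the first failed fetch, or with True
    # once the last chunk has been processed.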
    def verify_file(self, item, start, result):
        def verify_chunk(chunk, error, (id, i)):
            if error:
                if not state:
                    result(item, False)
                    state[True] = True
                return
            if i == 0:
                start(item)
            data = self.key.decrypt(id, chunk)
            if i == n - 1:
                result(item, True)
        state = {}
        n = len(item['chunks'])
        if n == 0:
            start(item)
            result(item, True)
        else:
            for i, (id, size, csize) in enumerate(item['chunks']):
                self.store.get(id, callback=verify_chunk, callback_data=(id, i))

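    # Delete this archive: decrement the refcounts of all file content chunks
    # and item metadata chunks, remove the manifest entry and commit.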
    def delete(self, cache):
        def callback(chunk, error, id):
            assert not error
            data = self.key.decrypt(id, chunk)
            unpacker.feed(data)
            for item in unpacker:
                try:
                    for chunk_id, size, csize in item['chunks']:
                        self.cache.chunk_decref(chunk_id)
                except KeyError:
                    pass
            self.cache.chunk_decref(id)
        unpacker = msgpack.Unpacker()
        for id in self.metadata['items']:
            self.store.get(id, callback=callback, callback_data=id)
        self.store.flush_rpc()
        self.cache.chunk_decref(self.id)
        del self.manifest.archives[self.name]
        self.manifest.write()
        self.store.commit()
        cache.commit()

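    # Build the common item metadata (mode, owner, group, mtime and, where
    # readable, xattrs) from a stat result.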
    def stat_attrs(self, st, path):
        item = {
            'mode': st.st_mode,
            'uid': st.st_uid, 'user': uid2user(st.st_uid),
            'gid': st.st_gid, 'group': gid2group(st.st_gid),
            'mtime': st.st_mtime,
        }
        try:
            xa = xattr(path, XATTR_NOFOLLOW)
            xattrs = {}
            for key in xa:
                # Only store the user namespace on Linux
                if linux and not key.startswith('user'):
                    continue
                xattrs[key] = xa[key]
            if xattrs:
                item['xattrs'] = xattrs
        except IOError:
            pass
        return item

    def process_dir(self, path, st):
        item = {'path': path.lstrip('/\\:')}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_fifo(self, path, st):
        item = {'path': path.lstrip('/\\:')}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

    def process_symlink(self, path, st):
        source = os.readlink(path)
        item = {'path': path.lstrip('/\\:'), 'source': source}
        item.update(self.stat_attrs(st, path))
        self.add_item(item)

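    # Archive a regular file: track hard links by (inode, device), reuse the
    # cached chunk ids when the file is known and unchanged, and otherwise
    # chunkify the contents and add the new chunks to the cache.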
    def process_file(self, path, st, cache):
        safe_path = path.lstrip('/\\:')
        # Is it a hard link?
        if st.st_nlink > 1:
            source = self.hard_links.get((st.st_ino, st.st_dev))
            if (st.st_ino, st.st_dev) in self.hard_links:
                item = self.stat_attrs(st, path)
                item.update({'path': safe_path, 'source': source})
                self.add_item(item)
                return
            else:
                self.hard_links[st.st_ino, st.st_dev] = safe_path
        path_hash = self.key.id_hash(path)
        ids = cache.file_known_and_unchanged(path_hash, st)
        chunks = None
        if ids is not None:
            # Make sure all ids are available
            for id in ids:
                if not cache.seen_chunk(id):
                    break
            else:
                chunks = [cache.chunk_incref(id, self.stats) for id in ids]
        # Only chunkify the file if needed
        if chunks is None:
            with open(path, 'rb') as fd:
                chunks = []
                for chunk in chunkify(fd, CHUNK_SIZE, WINDOW_SIZE,
                                      self.key.chunk_seed):
                    chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
            ids = [id for id, _, _ in chunks]
            cache.memorize_file(path_hash, st, ids)
        item = {'path': safe_path, 'chunks': chunks}
        item.update(self.stat_attrs(st, path))
        self.stats.nfiles += 1
        self.add_item(item)

    @staticmethod
    def list_archives(store, key, manifest, cache=None):
        for name, info in manifest.archives.items():
            yield Archive(store, key, manifest, name, cache=cache)