cache.py

from configparser import RawConfigParser
from binascii import hexlify
import msgpack
import os
import shutil
import sys

from .remote import cache_if_remote
from .key import PlaintextKey
from .helpers import Error, get_cache_dir, decode_dict, st_mtime_ns, unhexlify, UpgradableLock, \
    int_to_bigint, bigint_to_int
from .hashindex import ChunkIndex


class Cache:
    """Client side cache
    """

    class RepositoryReplay(Error):
        """Cache is newer than repository, refusing to continue"""

    class CacheInitAbortedError(Error):
        """Cache initialization aborted"""

    class RepositoryAccessAborted(Error):
        """Repository access aborted"""

    class EncryptionMethodMismatch(Error):
        """Repository encryption method changed since last access, refusing to continue"""

    def __init__(self, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True):
        self.lock = None
        self.timestamp = None
        self.txn_active = False
        self.repository = repository
        self.key = key
        self.manifest = manifest
        self.path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
        self.do_files = do_files
        # Warn user before sending data to a never-seen-before unencrypted repository
        if not os.path.exists(self.path):
            if warn_if_unencrypted and isinstance(key, PlaintextKey):
                if not self._confirm('Warning: Attempting to access a previously unknown unencrypted repository',
                                     'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK'):
                    raise self.CacheInitAbortedError()
            self.create()
        self.open()
        # Warn user before sending data to a relocated repository
        if self.previous_location and self.previous_location != repository._location.canonical_path():
            msg = 'Warning: The repository at location {} was previously located at {}'.format(
                repository._location.canonical_path(), self.previous_location)
            if not self._confirm(msg, 'BORG_RELOCATED_REPO_ACCESS_IS_OK'):
                raise self.RepositoryAccessAborted()
        if sync and self.manifest.id != self.manifest_id:
            # If the repository is older than the cache, something fishy is going on
            if self.timestamp and self.timestamp > manifest.timestamp:
                raise self.RepositoryReplay()
            # Make sure an encrypted repository has not been swapped for an unencrypted one
            if self.key_type is not None and self.key_type != str(key.TYPE):
                raise self.EncryptionMethodMismatch()
            self.sync()
            self.commit()

    def __del__(self):
        self.close()

    def _confirm(self, message, env_var_override=None):
        print(message, file=sys.stderr)
        if env_var_override and os.environ.get(env_var_override):
            print("Yes (From {})".format(env_var_override))
            return True
        if not sys.stdin.isatty():
            return False
        try:
            answer = input('Do you want to continue? [yN] ')
        except EOFError:
            return False
        return answer and answer in 'Yy'
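
    # Illustrative note: the BORG_*_IS_OK environment variables checked above
    # let non-interactive callers pre-answer the prompt, e.g. (assumed shell
    # invocation, not part of this module):
    #   BORG_RELOCATED_REPO_ACCESS_IS_OK=yes borg create ...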

    def create(self):
        """Create a new empty cache at `self.path`
        """
        os.makedirs(self.path)
        with open(os.path.join(self.path, 'README'), 'w') as fd:
            fd.write('This is a Borg cache')
        config = RawConfigParser()
        config.add_section('cache')
        config.set('cache', 'version', '1')
        config.set('cache', 'repository', hexlify(self.repository.id).decode('ascii'))
        config.set('cache', 'manifest', '')
        with open(os.path.join(self.path, 'config'), 'w') as fd:
            config.write(fd)
        ChunkIndex().write(os.path.join(self.path, 'chunks').encode('utf-8'))
        with open(os.path.join(self.path, 'files'), 'w') as fd:
            pass  # empty file
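
    # For orientation, create() leaves this layout on disk (paraphrased from
    # the code above, not an additional specification):
    #   <self.path>/README  -- marker text file
    #   <self.path>/config  -- INI file: version, repository id, manifest id
    #   <self.path>/chunks  -- ChunkIndex mapping chunk id -> (count, size, csize)
    #   <self.path>/files   -- msgpack stream of per-file metadata entries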

    def destroy(self):
        """Destroy the cache at `self.path`
        """
        self.close()
        os.remove(os.path.join(self.path, 'config'))  # kill config first
        shutil.rmtree(self.path)

    def _do_open(self):
        self.config = RawConfigParser()
        self.config.read(os.path.join(self.path, 'config'))
        if self.config.getint('cache', 'version') != 1:
            raise Exception('%s does not look like a Borg cache' % self.path)
        self.id = self.config.get('cache', 'repository')
        self.manifest_id = unhexlify(self.config.get('cache', 'manifest'))
        self.timestamp = self.config.get('cache', 'timestamp', fallback=None)
        self.key_type = self.config.get('cache', 'key_type', fallback=None)
        self.previous_location = self.config.get('cache', 'previous_location', fallback=None)
        self.chunks = ChunkIndex.read(os.path.join(self.path, 'chunks').encode('utf-8'))
        self.files = None

    def open(self):
        if not os.path.isdir(self.path):
            raise Exception('%s does not look like a Borg cache' % self.path)
        self.lock = UpgradableLock(os.path.join(self.path, 'config'), exclusive=True)
        self.rollback()

    def close(self):
        if self.lock:
            self.lock.release()
            self.lock = None

    def _read_files(self):
        self.files = {}
        self._newest_mtime = 0
        with open(os.path.join(self.path, 'files'), 'rb') as fd:
            u = msgpack.Unpacker(use_list=True)
            while True:
                data = fd.read(64 * 1024)
                if not data:
                    break
                u.feed(data)
                for path_hash, item in u:
                    item[0] += 1
                    # in the end, this takes about 240 bytes per file
                    self.files[path_hash] = msgpack.packb(item)
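
    # Each files cache entry unpacks to [age, inode, size, mtime, chunk ids]
    # (see memorize_file() below); the item[0] += 1 above therefore ages every
    # entry by one backup run each time the cache is loaded.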

    def begin_txn(self):
        # Initialize transaction snapshot
        txn_dir = os.path.join(self.path, 'txn.tmp')
        os.mkdir(txn_dir)
        shutil.copy(os.path.join(self.path, 'config'), txn_dir)
        shutil.copy(os.path.join(self.path, 'chunks'), txn_dir)
        shutil.copy(os.path.join(self.path, 'files'), txn_dir)
        os.rename(os.path.join(self.path, 'txn.tmp'),
                  os.path.join(self.path, 'txn.active'))
        self.txn_active = True
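
    # Transaction sketch, as implemented by the methods around this comment:
    # begin_txn() copies config/chunks/files into txn.tmp and renames it to
    # txn.active; commit() writes the new state, then renames txn.active back
    # to txn.tmp and deletes it; rollback() restores the txn.active snapshot
    # if one is still present after a crash or abort.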

    def commit(self):
        """Commit transaction
        """
        if not self.txn_active:
            return
        if self.files is not None:
            with open(os.path.join(self.path, 'files'), 'wb') as fd:
                for path_hash, item in self.files.items():
                    # Discard cached files with the newest mtime to avoid
                    # issues with filesystem snapshots and mtime precision;
                    # also expire entries unused for 10 backup runs.
                    item = msgpack.unpackb(item)
                    if item[0] < 10 and bigint_to_int(item[3]) < self._newest_mtime:
                        msgpack.pack((path_hash, item), fd)
        self.config.set('cache', 'manifest', hexlify(self.manifest.id).decode('ascii'))
        self.config.set('cache', 'timestamp', self.manifest.timestamp)
        self.config.set('cache', 'key_type', str(self.key.TYPE))
        self.config.set('cache', 'previous_location', self.repository._location.canonical_path())
        with open(os.path.join(self.path, 'config'), 'w') as fd:
            self.config.write(fd)
        self.chunks.write(os.path.join(self.path, 'chunks').encode('utf-8'))
        os.rename(os.path.join(self.path, 'txn.active'),
                  os.path.join(self.path, 'txn.tmp'))
        shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
        self.txn_active = False

    def rollback(self):
        """Roll back partial and aborted transactions
        """
        # Remove partial transaction
        if os.path.exists(os.path.join(self.path, 'txn.tmp')):
            shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
        # Roll back active transaction
        txn_dir = os.path.join(self.path, 'txn.active')
        if os.path.exists(txn_dir):
            shutil.copy(os.path.join(txn_dir, 'config'), self.path)
            shutil.copy(os.path.join(txn_dir, 'chunks'), self.path)
            shutil.copy(os.path.join(txn_dir, 'files'), self.path)
            os.rename(txn_dir, os.path.join(self.path, 'txn.tmp'))
            if os.path.exists(os.path.join(self.path, 'txn.tmp')):
                shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
        self.txn_active = False
        self._do_open()

    def sync(self):
        """Initialize the cache by fetching and reading all archive indices
        """
        def add(id, size, csize):
            try:
                count, size, csize = self.chunks[id]
                self.chunks[id] = count + 1, size, csize
            except KeyError:
                self.chunks[id] = 1, size, csize
        self.begin_txn()
        print('Initializing cache...')
        self.chunks.clear()
        unpacker = msgpack.Unpacker()
        repository = cache_if_remote(self.repository)
        for name, info in self.manifest.archives.items():
            archive_id = info[b'id']
            cdata = repository.get(archive_id)
            data = self.key.decrypt(archive_id, cdata)
            add(archive_id, len(data), len(cdata))
            archive = msgpack.unpackb(data)
            if archive[b'version'] != 1:
                raise Exception('Unknown archive metadata version')
            decode_dict(archive, (b'name',))
            print('Analyzing archive:', archive[b'name'])
            for key, chunk in zip(archive[b'items'], repository.get_many(archive[b'items'])):
                data = self.key.decrypt(key, chunk)
                add(key, len(data), len(chunk))
                unpacker.feed(data)
                for item in unpacker:
                    if b'chunks' in item:
                        for chunk_id, size, csize in item[b'chunks']:
                            add(chunk_id, size, csize)
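
    # sync() rebuilds the chunk index from scratch: it walks every archive's
    # metadata and every item's chunk list, and the add() closure bumps the
    # refcount for chunks seen again, so the counts end up matching the
    # repository contents.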

    def add_chunk(self, id, data, stats):
        if not self.txn_active:
            self.begin_txn()
        if self.seen_chunk(id):
            return self.chunk_incref(id, stats)
        size = len(data)
        data = self.key.encrypt(data)
        csize = len(data)
        self.repository.put(id, data, wait=False)
        self.chunks[id] = (1, size, csize)
        stats.update(size, csize, True)
        return id, size, csize

    def seen_chunk(self, id):
        return self.chunks.get(id, (0, 0, 0))[0]

    def chunk_incref(self, id, stats):
        if not self.txn_active:
            self.begin_txn()
        count, size, csize = self.chunks[id]
        self.chunks[id] = (count + 1, size, csize)
        stats.update(size, csize, False)
        return id, size, csize

    def chunk_decref(self, id, stats):
        if not self.txn_active:
            self.begin_txn()
        count, size, csize = self.chunks[id]
        if count == 1:
            del self.chunks[id]
            self.repository.delete(id, wait=False)
            stats.update(-size, -csize, True)
        else:
            self.chunks[id] = (count - 1, size, csize)
            stats.update(-size, -csize, False)
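
    # Refcounting summary: seen_chunk() returns the current count (0 when the
    # chunk is unknown), add_chunk() stores new data with a count of 1, and
    # chunk_incref()/chunk_decref() adjust counts, deleting the repository
    # object once the last reference is dropped.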

    def file_known_and_unchanged(self, path_hash, st):
        if not self.do_files:
            return None
        if self.files is None:
            self._read_files()
        entry = self.files.get(path_hash)
        if not entry:
            return None
        entry = msgpack.unpackb(entry)
        if entry[2] == st.st_size and bigint_to_int(entry[3]) == st_mtime_ns(st) and entry[1] == st.st_ino:
            # reset entry age
            entry[0] = 0
            self.files[path_hash] = msgpack.packb(entry)
            return entry[4]
        else:
            return None

    def memorize_file(self, path_hash, st, ids):
        if not self.do_files:
            return
        # Entry: Age, inode, size, mtime, chunk ids
        mtime_ns = st_mtime_ns(st)
        self.files[path_hash] = msgpack.packb((0, st.st_ino, st.st_size, int_to_bigint(mtime_ns), ids))
        self._newest_mtime = max(self._newest_mtime, mtime_ns)
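

# Minimal usage sketch (illustrative only; `repository`, `key`, `manifest`,
# `chunk_id`, `data` and `stats` stand for objects constructed elsewhere in
# this code base -- `stats` only needs the update(size, csize, unique) method
# used above):
#
#   cache = Cache(repository, key, manifest, do_files=True)
#   try:
#       if cache.seen_chunk(chunk_id):
#           cache.chunk_incref(chunk_id, stats)
#       else:
#           cache.add_chunk(chunk_id, data, stats)
#       cache.commit()
#   finally:
#       cache.close()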