upgrader.py

from binascii import hexlify
import datetime
import logging
logger = logging.getLogger(__name__)
import os
import shutil
import sys
import time

from .helpers import get_keys_dir, get_cache_dir
from .locking import UpgradableLock
from .repository import Repository, MAGIC
from .key import KeyfileKey, KeyfileNotFoundError

ATTIC_MAGIC = b'ATTICSEG'
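

# note: ATTIC_MAGIC and borg's MAGIC (b'BORG_SEG') are both 8 bytes long,
# which is what allows header_replace() below to overwrite segment and index
# headers without shifting the rest of the file.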
class AtticRepositoryUpgrader(Repository):
    def upgrade(self, dryrun=True, inplace=False):
        """convert an attic repository to a borg repository

        these are the files that need to be upgraded here, from most
        important to least important: segments, key files, and various
        caches, the latter being optional, as they will be rebuilt if
        missing.

        we nevertheless do the order in reverse, as we prefer to do
        the fast stuff first, to improve interactivity.
        """
        backup = None
        if not inplace:
            backup = '{}.upgrade-{:%Y-%m-%d-%H:%M:%S}'.format(self.path, datetime.datetime.now())
            logger.info('making a hardlink copy in %s', backup)
            if not dryrun:
                shutil.copytree(self.path, backup, copy_function=os.link)
        logger.info("opening attic repository with borg and converting")
        # we need to open the repo to load configuration, keyfiles and segments
        self.open(self.path, exclusive=False)
        segments = [filename for i, filename in self.io.segment_iterator()]
        try:
            keyfile = self.find_attic_keyfile()
        except KeyfileNotFoundError:
            logger.warning("no key file found for repository")
        else:
            self.convert_keyfiles(keyfile, dryrun)
        self.close()
        # partial open: just hold on to the lock
        self.lock = UpgradableLock(os.path.join(self.path, 'lock'),
                                   exclusive=True).acquire()
        try:
            self.convert_cache(dryrun)
            self.convert_segments(segments, dryrun=dryrun, inplace=inplace)
        finally:
            self.lock.release()
            self.lock = None
        return backup

    @staticmethod
    def convert_segments(segments, dryrun=True, inplace=False):
        """convert repository segments from attic to borg

        replacement pattern is `s/ATTICSEG/BORG_SEG/` in files in
        `$ATTIC_REPO/data/**`.

        luckily the magic string length didn't change, so we can just
        replace the first 8 bytes of all regular files in there."""
        logger.info("converting %d segments..." % len(segments))
        i = 0
        for filename in segments:
            i += 1
            print("\rconverting segment %d/%d, %.2f%% done (%s)"
                  % (i, len(segments), 100 * float(i) / len(segments), filename),
                  end='', file=sys.stderr)
            if dryrun:
                time.sleep(0.001)
            else:
                AtticRepositoryUpgrader.header_replace(filename, ATTIC_MAGIC, MAGIC, inplace=inplace)
        print(file=sys.stderr)
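
    # Illustration only (the segment path below is hypothetical): with a
    # hardlink copy of the repo in place,
    #     header_replace('data/0/0', ATTIC_MAGIC, MAGIC, inplace=False)
    # renames the segment aside to 'data/0/0.tmp', writes a fresh file that
    # starts with b'BORG_SEG' followed by the old payload, then unlinks the
    # .tmp name, so the hardlinked backup keeps the original attic header.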
    @staticmethod
    def header_replace(filename, old_magic, new_magic, inplace=True):
        with open(filename, 'r+b') as segment:
            segment.seek(0)
            # only write if necessary
            if segment.read(len(old_magic)) == old_magic:
                if inplace:
                    segment.seek(0)
                    segment.write(new_magic)
                else:
                    # rename the hardlink aside and rewrite the file. this
                    # works because our old file handle is still open, so
                    # even though the file has been renamed we can still
                    # read from it until it is closed.
                    os.rename(filename, filename + '.tmp')
                    with open(filename, 'wb') as new_segment:
                        new_segment.write(new_magic)
                        new_segment.write(segment.read())
                    # the little dance with the .tmp file is necessary
                    # because Windows won't allow overwriting an open
                    # file.
                    os.unlink(filename + '.tmp')

    def find_attic_keyfile(self):
        """find the attic keyfiles

        the keyfiles are loaded by `KeyfileKey.find_key_file()`, which
        finds the keys with the right identifier for the repo.

        this is expected to look into $HOME/.attic/keys or
        $ATTIC_KEYS_DIR for key files matching the given Borg
        repository.

        it is expected to raise an exception (KeyfileNotFoundError) if
        no key is found. whether that exception is from Borg or Attic
        is unclear.

        this is split into a separate function in case we want to use
        the attic code here directly, instead of our local
        implementation."""
        return AtticKeyfileKey.find_key_file(self)

    @staticmethod
    def convert_keyfiles(keyfile, dryrun):
        """convert key files from attic to borg

        replacement pattern is `s/ATTIC KEY/BORG_KEY/`: the key file
        found in `$ATTIC_KEYS_DIR` or `$HOME/.attic/keys` is rewritten
        and saved to `get_keys_dir()`, that is `$BORG_KEYS_DIR` or
        `$HOME/.borg/keys`.

        no need to decrypt to convert. we need to rewrite the whole
        key file because the magic string length changed, but that's
        not a problem because the keyfiles are small (compared to,
        say, all the segments)."""
        logger.info("converting keyfile %s" % keyfile)
        with open(keyfile, 'r') as f:
            data = f.read()
        data = data.replace(AtticKeyfileKey.FILE_ID, KeyfileKey.FILE_ID, 1)
        keyfile = os.path.join(get_keys_dir(), os.path.basename(keyfile))
        logger.info("writing borg keyfile to %s" % keyfile)
        if not dryrun:
            with open(keyfile, 'w') as f:
                f.write(data)

    def convert_cache(self, dryrun):
        """convert caches from attic to borg

        these are all hash indexes, so we need to
        `s/ATTICIDX/BORG_IDX/` in a few locations:

        * the repository index (in `$ATTIC_REPO/index.%d`, where `%d`
          is the `Repository.get_index_transaction_id()`), which we
          should probably update, with a lock, see
          `Repository.open()`, which I'm not sure we should use
          because it may write data on `Repository.close()`...

        * the `files` and `chunks` cache (in `$ATTIC_CACHE_DIR` or
          `$HOME/.cache/attic/<repoid>/`), which we could just drop,
          but if we wanted to convert it, we could open it with
          `Cache.open()`, edit in place and then `Cache.close()` to
          make sure we have locking right
        """
        transaction_id = self.get_index_transaction_id()
        if transaction_id is None:
            logger.warning('no index file found for repository %s' % self.path)
        else:
            index = os.path.join(self.path, 'index.%d' % transaction_id).encode('utf-8')
            logger.info("converting repository index %s" % index)
            if not dryrun:
                AtticRepositoryUpgrader.header_replace(index, b'ATTICIDX', b'BORG_IDX')

        # copy of attic's get_cache_dir()
        attic_cache_dir = os.environ.get('ATTIC_CACHE_DIR',
                                         os.path.join(os.path.expanduser('~'),
                                                      '.cache', 'attic'))
        attic_cache_dir = os.path.join(attic_cache_dir, hexlify(self.id).decode('ascii'))
        borg_cache_dir = os.path.join(get_cache_dir(), hexlify(self.id).decode('ascii'))

        def copy_cache_file(path):
            """copy the given attic cache path into the borg directory

            does nothing if dryrun is True. also expects
            attic_cache_dir and borg_cache_dir to be set in the parent
            scope, to the directory paths including the repository
            identifier.

            :param path: the basename of the cache file to copy
              (example: "files" or "chunks") as a string
            :returns: the borg file that was created, or None if no
              attic cache file was found.
            """
            attic_file = os.path.join(attic_cache_dir, path)
            if os.path.exists(attic_file):
                borg_file = os.path.join(borg_cache_dir, path)
                if os.path.exists(borg_file):
                    logger.warning("borg cache file already exists in %s, not copying from attic", borg_file)
                else:
                    logger.info("copying attic cache file from %s to %s" % (attic_file, borg_file))
                    if not dryrun:
                        shutil.copyfile(attic_file, borg_file)
                return borg_file
            else:
                logger.warning("no %s cache file found in %s" % (path, attic_file))
                return None

        # XXX: untested, because generating cache files is a PITA, see
        # Archiver.do_create() for proof
        if os.path.exists(attic_cache_dir):
            if not os.path.exists(borg_cache_dir):
                os.makedirs(borg_cache_dir)

            # files that have no header to convert, just copy them over
            for cache in ['config', 'files']:
                copy_cache_file(cache)

            # these files need their header converted, so copy first
            for cache in ['chunks']:
                cache = copy_cache_file(cache)
                if cache is None:
                    # nothing to convert if attic had no such cache file
                    continue
                logger.info("converting cache %s" % cache)
                if not dryrun:
                    AtticRepositoryUpgrader.header_replace(cache, b'ATTICIDX', b'BORG_IDX')
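

# The class below is a thin attic-flavoured wrapper around borg's KeyfileKey:
# it only overrides the key file identifier and the directory in which key
# files are looked up, so upgrade() can locate the attic key file for the
# repository being converted.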
class AtticKeyfileKey(KeyfileKey):
    """backwards compatible Attic key file parser"""
    FILE_ID = 'ATTIC KEY'

    # verbatim copy from attic
    @staticmethod
    def get_keys_dir():
        """Determine where attic stores repository keys"""
        return os.environ.get('ATTIC_KEYS_DIR',
                              os.path.join(os.path.expanduser('~'), '.attic', 'keys'))

    @classmethod
    def find_key_file(cls, repository):
        """copy of attic's `find_key_file`_

        this has two small modifications:

        1. it uses the above `get_keys_dir`_ instead of the global one,
           assumed to be borg's

        2. it uses `repository.path`_ instead of
           `repository._location.canonical_path`_ because we can't
           assume the repository has been opened by the archiver yet
        """
        get_keys_dir = cls.get_keys_dir
        id = hexlify(repository.id).decode('ascii')
        keys_dir = get_keys_dir()
        for name in os.listdir(keys_dir):
            filename = os.path.join(keys_dir, name)
            with open(filename, 'r') as fd:
                line = fd.readline().strip()
                if line and line.startswith(cls.FILE_ID) and line[10:] == id:
                    return filename
        raise KeyfileNotFoundError(repository.path, get_keys_dir())
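

# Usage sketch (not part of the module): the conversion is normally driven by
# borg's "upgrade" command, but it roughly boils down to the following. The
# repository path is hypothetical, and a dry run is worth doing first since
# the real run rewrites segment headers.
#
#     repo = AtticRepositoryUpgrader('/path/to/attic/repo')
#     repo.upgrade(dryrun=True)             # rehearse first, no headers rewritten
#     backup = repo.upgrade(dryrun=False)   # convert; returns the hardlink
#                                           # copy made beforehand (None if
#                                           # inplace=True was passed)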