upgrader.py

from binascii import hexlify
import os
import shutil
import time

from .helpers import get_keys_dir, get_cache_dir
from .locking import UpgradableLock
from .repository import Repository, MAGIC
from .key import KeyfileKey, KeyfileNotFoundError

ATTIC_MAGIC = b'ATTICSEG'
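
# Sanity check (an addition for illustration, not part of the original
# module): the attic and borg segment magics must have the same length for
# the in-place header rewrite done by header_replace() below to be safe.
assert len(ATTIC_MAGIC) == len(MAGIC)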


class AtticRepositoryUpgrader(Repository):
    def upgrade(self, dryrun=True):
        """convert an attic repository to a borg repository

        those are the files that need to be upgraded here, from most
        important to least important: segments, key files, and various
        caches, the latter being optional, as they will be rebuilt if
        missing.

        we nevertheless do the order in reverse, as we prefer to do
        the fast stuff first, to improve interactivity.
        """
        print("reading segments from attic repository using borg")
        # we need to open it to load the configuration and other fields
        self.open(self.path, exclusive=False)
        segments = [filename for i, filename in self.io.segment_iterator()]
        try:
            keyfile = self.find_attic_keyfile()
        except KeyfileNotFoundError:
            print("no key file found for repository")
        else:
            self.convert_keyfiles(keyfile, dryrun)
        self.close()
        # partial open: just hold on to the lock
        self.lock = UpgradableLock(os.path.join(self.path, 'lock'),
                                   exclusive=True).acquire()
        try:
            self.convert_cache(dryrun)
            self.convert_segments(segments, dryrun)
        finally:
            self.lock.release()
            self.lock = None

    @staticmethod
    def convert_segments(segments, dryrun):
        """convert repository segments from attic to borg

        replacement pattern is `s/ATTICSEG/BORG_SEG/` in files in
        `$ATTIC_REPO/data/**`.

        luckily the magic string length didn't change, so we can just
        replace the first 8 bytes of all regular files in there."""
        print("converting %d segments..." % len(segments))
        i = 0
        for filename in segments:
            i += 1
            print("\rconverting segment %d/%d in place, %.2f%% done (%s)"
                  % (i, len(segments), 100 * float(i) / len(segments), filename), end='')
            if dryrun:
                time.sleep(0.001)
            else:
                AtticRepositoryUpgrader.header_replace(filename, ATTIC_MAGIC, MAGIC)
        print()

    @staticmethod
    def header_replace(filename, old_magic, new_magic):
        with open(filename, 'r+b') as segment:
            segment.seek(0)
            # only write if necessary
            if segment.read(len(old_magic)) == old_magic:
                segment.seek(0)
                segment.write(new_magic)
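
    # Illustrative check of header_replace() (a sketch added here, not part
    # of the original module): write a fake attic segment header to a
    # temporary file, convert it in place, and verify only the magic changed.
    #
    #   import tempfile
    #   with tempfile.NamedTemporaryFile(delete=False) as tmp:
    #       tmp.write(ATTIC_MAGIC + b'payload')
    #   AtticRepositoryUpgrader.header_replace(tmp.name, ATTIC_MAGIC, MAGIC)
    #   with open(tmp.name, 'rb') as f:
    #       assert f.read() == MAGIC + b'payload'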

    def find_attic_keyfile(self):
        """find the attic keyfiles

        the keyfiles are loaded by `KeyfileKey.find_key_file()`. that
        finds the keys with the right identifier for the repo.

        this is expected to look into $HOME/.attic/keys or
        $ATTIC_KEYS_DIR for key files matching the given Borg
        repository.

        it is expected to raise an exception (KeyfileNotFoundError) if
        no key is found. whether that exception is from Borg or Attic
        is unclear.

        this is split into a separate function in case we want to use
        the attic code here directly, instead of our local
        implementation."""
        return AtticKeyfileKey.find_key_file(self)

    @staticmethod
    def convert_keyfiles(keyfile, dryrun):
        """convert key files from attic to borg

        replacement pattern is `s/ATTIC KEY/BORG_KEY/` in
        `get_keys_dir()`, that is `$ATTIC_KEYS_DIR` or
        `$HOME/.attic/keys`, and moved to `$BORG_KEYS_DIR` or
        `$HOME/.borg/keys`.

        no need to decrypt to convert. we need to rewrite the whole
        key file because the magic string length changed, but that's
        not a problem because the keyfiles are small (compared to,
        say, all the segments)."""
        print("converting keyfile %s" % keyfile)
        with open(keyfile, 'r') as f:
            data = f.read()
        data = data.replace(AtticKeyfileKey.FILE_ID, KeyfileKey.FILE_ID, 1)
        keyfile = os.path.join(get_keys_dir(), os.path.basename(keyfile))
        print("writing borg keyfile to %s" % keyfile)
        if not dryrun:
            with open(keyfile, 'w') as f:
                f.write(data)

    def convert_cache(self, dryrun):
        """convert caches from attic to borg

        those are all hash indexes, so we need to
        `s/ATTICIDX/BORG_IDX/` in a few locations:

        * the repository index (in `$ATTIC_REPO/index.%d`, where `%d`
          is the `Repository.get_index_transaction_id()`), which we
          should probably update, with a lock, see
          `Repository.open()`, which i'm not sure we should use
          because it may write data on `Repository.close()`...

        * the `files` and `chunks` cache (in `$ATTIC_CACHE_DIR` or
          `$HOME/.cache/attic/<repoid>/`), which we could just drop,
          but if we'd want to convert, we could open it with the
          `Cache.open()`, edit in place and then `Cache.close()` to
          make sure we have locking right
        """
        caches = []
        transaction_id = self.get_index_transaction_id()
        if transaction_id is None:
            print('no index file found for repository %s' % self.path)
        else:
            caches += [os.path.join(self.path, 'index.%d' % transaction_id).encode('utf-8')]

        # copy of attic's get_cache_dir()
        attic_cache_dir = os.environ.get('ATTIC_CACHE_DIR',
                                         os.path.join(os.path.expanduser('~'),
                                                      '.cache', 'attic'))
        attic_cache_dir = os.path.join(attic_cache_dir, hexlify(self.id).decode('ascii'))
        borg_cache_dir = os.path.join(get_cache_dir(), hexlify(self.id).decode('ascii'))

        def copy_cache_file(path):
            """copy the given attic cache path into the borg directory

            does nothing if dryrun is True. also expects
            attic_cache_dir and borg_cache_dir to be set in the parent
            scope, to the directory paths including the repository
            identifier.

            :param path: the basename of the cache file to copy
                         (example: "files" or "chunks") as a string
            :returns: the borg file that was created, or None if no
                      file was created.
            """
            attic_file = os.path.join(attic_cache_dir, path)
            if os.path.exists(attic_file):
                borg_file = os.path.join(borg_cache_dir, path)
                if os.path.exists(borg_file):
                    print("borg cache file already exists in %s, skipping conversion of %s" % (borg_file, attic_file))
                else:
                    print("copying attic cache file from %s to %s" % (attic_file, borg_file))
                    if not dryrun:
                        shutil.copyfile(attic_file, borg_file)
                    return borg_file
            else:
                print("no %s cache file found in %s" % (path, attic_file))
            return None

        # XXX: untested, because generating cache files is a PITA, see
        # Archiver.do_create() for proof
        if os.path.exists(attic_cache_dir):
            if not os.path.exists(borg_cache_dir):
                os.makedirs(borg_cache_dir)

            # files that we don't have a header to convert, just copy
            for cache in ['config', 'files']:
                copy_cache_file(cache)

            # we need to convert the headers of those files, copy first
            for cache in ['chunks']:
                copied = copy_cache_file(cache)
                if copied:
                    print("converting cache %s" % cache)
                    if not dryrun:
                        AtticRepositoryUpgrader.header_replace(copied, b'ATTICIDX', b'BORG_IDX')


class AtticKeyfileKey(KeyfileKey):
    """backwards compatible Attic key file parser"""
    FILE_ID = 'ATTIC KEY'

    # verbatim copy from attic
    @staticmethod
    def get_keys_dir():
        """Determine where to store repository keys"""
        return os.environ.get('ATTIC_KEYS_DIR',
                              os.path.join(os.path.expanduser('~'), '.attic', 'keys'))

    @classmethod
    def find_key_file(cls, repository):
        """copy of attic's `find_key_file`_

        this has two small modifications:

        1. it uses the above `get_keys_dir`_ instead of the global one,
           assumed to be borg's

        2. it uses `repository.path`_ instead of
           `repository._location.canonical_path`_ because we can't
           assume the repository has been opened by the archiver yet
        """
        get_keys_dir = cls.get_keys_dir
        id = hexlify(repository.id).decode('ascii')
        keys_dir = get_keys_dir()
        for name in os.listdir(keys_dir):
            filename = os.path.join(keys_dir, name)
            with open(filename, 'r') as fd:
                line = fd.readline().strip()
                if line and line.startswith(cls.FILE_ID) and line[10:] == id:
                    return filename
        raise KeyfileNotFoundError(repository.path, get_keys_dir())
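

# Example invocation (a sketch added for illustration, not part of the
# original module; this class is normally driven by the archiver, and the
# repository path below is hypothetical):
#
#   upgrader = AtticRepositoryUpgrader('/path/to/attic/repo', create=False)
#   upgrader.upgrade(dryrun=True)    # only report what would be converted
#   upgrader.upgrade(dryrun=False)   # convert segments, key file and caches in place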