import argparse
from binascii import hexlify
from datetime import datetime
from operator import attrgetter
import os
import stat
import sys

from .archive import Archive
from .repository import Repository
from .cache import Cache
from .key import key_creator
from .helpers import location_validator, format_time, \
    format_file_mode, IncludePattern, ExcludePattern, exclude_path, adjust_patterns, to_localtime, \
    get_cache_dir, get_keys_dir, format_timedelta, prune_split, Manifest, Location, remove_surrogates, \
    daemonize
from .remote import RepositoryServer, RemoteRepository, ConnectionClosed


class Archiver:

    def __init__(self):
        self.exit_code = 0

    def open_repository(self, location, create=False):
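        # ssh locations are accessed through the remote protocol; everything
        # else is treated as a local repository path.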
        if location.proto == 'ssh':
            repository = RemoteRepository(location, create=create)
        else:
            repository = Repository(location.path, create=create)
        repository._location = location
        return repository

    def print_error(self, msg, *args):
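        # Record the failure so main() can exit with a non-zero status later.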
        msg = args and msg % args or msg
        self.exit_code = 1
        print('attic: ' + msg, file=sys.stderr)

    def print_verbose(self, msg, *args, **kw):
        if self.verbose:
            msg = args and msg % args or msg
            if kw.get('newline', True):
                print(msg)
            else:
                print(msg, end=' ')

    def do_serve(self, args):
        return RepositoryServer().serve()

    def do_init(self, args):
        """Initialize a new repository
        """
        print('Initializing repository at "%s"' % args.repository.orig)
        repository = self.open_repository(args.repository, create=True)
        key = key_creator(repository, args)
        manifest = Manifest()
        manifest.repository = repository
        manifest.key = key
        manifest.write()
        repository.commit()
        return self.exit_code

    def do_change_passphrase(self, args):
        """Change passphrase on repository key file
        """
        repository = self.open_repository(Location(args.repository))
        manifest, key = Manifest.load(repository)
        key.change_passphrase()
        return self.exit_code

    def do_create(self, args):
        """Create new archive
        """
        t0 = datetime.now()
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                          create=True, checkpoint_interval=args.checkpoint_interval,
                          numeric_owner=args.numeric_owner)
        # Add Attic cache dir to inode_skip list
        skip_inodes = set()
        try:
            st = os.stat(get_cache_dir())
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
        # Add local repository dir to inode_skip list
        if not args.archive.host:
            try:
                st = os.stat(args.archive.path)
                skip_inodes.add((st.st_ino, st.st_dev))
            except IOError:
                pass
        for path in args.paths:
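            # With --do-not-cross-mountpoints, remember the device of each
            # top-level path so _process() stays on that filesystem.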
            if args.dontcross:
                try:
                    restrict_dev = os.lstat(path).st_dev
                except OSError as e:
                    self.print_error('%s: %s', path, e)
                    continue
            else:
                restrict_dev = None
            self._process(archive, cache, args.excludes, skip_inodes, path, restrict_dev)
        archive.save()
        if args.stats:
            t = datetime.now()
            diff = t - t0
            print('-' * 40)
            print('Archive name: %s' % args.archive.archive)
            print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
            print('Start time: %s' % t0.strftime('%c'))
            print('End time: %s' % t.strftime('%c'))
            print('Duration: %s' % format_timedelta(diff))
            archive.stats.print_()
            print('-' * 40)
        return self.exit_code

    def _process(self, archive, cache, excludes, skip_inodes, path, restrict_dev):
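        # Recursively walk the filesystem tree, dispatching on file type and
        # skipping excluded paths, already-seen inodes and foreign devices.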
        if exclude_path(path, excludes):
            return
        try:
            st = os.lstat(path)
        except OSError as e:
            self.print_error('%s: %s', path, e)
            return
        if (st.st_ino, st.st_dev) in skip_inodes:
            return
        # Entering a new filesystem?
        if restrict_dev and st.st_dev != restrict_dev:
            return
        # Ignore unix sockets
        if stat.S_ISSOCK(st.st_mode):
            return
        self.print_verbose(remove_surrogates(path))
        if stat.S_ISREG(st.st_mode):
            try:
                archive.process_file(path, st, cache)
            except IOError as e:
                self.print_error('%s: %s', path, e)
        elif stat.S_ISDIR(st.st_mode):
            archive.process_item(path, st)
            try:
                entries = os.listdir(path)
            except OSError as e:
                self.print_error('%s: %s', path, e)
            else:
                for filename in sorted(entries):
                    self._process(archive, cache, excludes, skip_inodes,
                                  os.path.join(path, filename), restrict_dev)
        elif stat.S_ISLNK(st.st_mode):
            archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            archive.process_item(path, st)
        elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
            archive.process_dev(path, st)
        else:
            self.print_error('Unknown file type: %s', path)

    def do_extract(self, args):
        """Extract archive contents
        """
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        archive = Archive(repository, key, manifest, args.archive.archive,
                          numeric_owner=args.numeric_owner)
        patterns = adjust_patterns(args.paths, args.excludes)
        dirs = []
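        # Directories are extracted without their attributes first and kept on a
        # stack; attributes are restored once all of a directory's contents have
        # been written, so timestamps are not clobbered.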
        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
            while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                archive.extract_item(dirs.pop(-1))
            self.print_verbose(remove_surrogates(item[b'path']))
            try:
                if stat.S_ISDIR(item[b'mode']):
                    dirs.append(item)
                    archive.extract_item(item, restore_attrs=False)
                else:
                    archive.extract_item(item, peek=peek)
            except IOError as e:
                self.print_error('%s: %s', remove_surrogates(item[b'path']), e)
        while dirs:
            archive.extract_item(dirs.pop(-1))
        return self.exit_code

    def do_delete(self, args):
        """Delete archive
        """
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        archive.delete(cache)
        return self.exit_code
    def do_mount(self, args):
        """Mount archive as a FUSE filesystem
        """
        try:
            from attic.fuse import AtticOperations
        except ImportError:
            self.print_error('the "llfuse" module is required to use this feature')
            return self.exit_code
        if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
            self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
            return self.exit_code
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        self.print_verbose("Loading archive metadata...", newline=False)
        archive = Archive(repository, key, manifest, args.archive.archive)
        self.print_verbose('done')
        operations = AtticOperations(key, repository, archive)
        self.print_verbose("Mounting filesystem")
        if not args.foreground:
            daemonize()
        operations.mount(args.mountpoint)
        return self.exit_code

    def do_list(self, args):
        """List archive or repository contents
        """
        repository = self.open_repository(args.src)
        manifest, key = Manifest.load(repository)
        if args.src.archive:
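            # Map the file type bits of st_mode (mode // 4096 == mode >> 12) to
            # ls-style type characters.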
            tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
            archive = Archive(repository, key, manifest, args.src.archive)
            for item, _ in archive.iter_items():
                type = tmap.get(item[b'mode'] // 4096, '?')
                mode = format_file_mode(item[b'mode'])
                size = 0
                if type == '-':
                    try:
                        size = sum(size for _, size, _ in item[b'chunks'])
                    except KeyError:
                        pass
                mtime = format_time(datetime.fromtimestamp(item[b'mtime'] / 10**9))
                if b'source' in item:
                    if type == 'l':
                        extra = ' -> %s' % item[b'source']
                    else:
                        type = 'h'
                        extra = ' link to %s' % item[b'source']
                else:
                    extra = ''
                print('%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item[b'user'] or item[b'uid'],
                                                      item[b'group'] or item[b'gid'], size, mtime,
                                                      remove_surrogates(item[b'path']), extra))
        else:
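            # No archive given: list the archives in the repository, oldest first.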
            for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
                print('%-20s %s' % (archive.metadata[b'name'], to_localtime(archive.ts).strftime('%c')))
        return self.exit_code

    def do_verify(self, args):
        """Verify archive consistency
        """
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        archive = Archive(repository, key, manifest, args.archive.archive)
        patterns = adjust_patterns(args.paths, args.excludes)

        def start_cb(item):
            self.print_verbose('%s ...', remove_surrogates(item[b'path']), newline=False)

        def result_cb(item, success):
            if success:
                self.print_verbose('OK')
            else:
                self.print_verbose('ERROR')
                self.print_error('%s: verification failed' % remove_surrogates(item[b'path']))
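        # Only regular files carry chunks that can be verified; the callbacks
        # report per-file progress and failures.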
        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
            if stat.S_ISREG(item[b'mode']) and b'chunks' in item:
                archive.verify_file(item, start_cb, result_cb, peek=peek)
        return self.exit_code

    def do_info(self, args):
        """Show archive details such as disk space used
        """
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        stats = archive.calc_stats(cache)
        print('Name:', archive.name)
        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Hostname:', archive.metadata[b'hostname'])
        print('Username:', archive.metadata[b'username'])
        print('Time: %s' % to_localtime(archive.ts).strftime('%c'))
        print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
        stats.print_()
        return self.exit_code

    def do_prune(self, args):
        """Prune repository archives according to specified rules
        """
        repository = self.open_repository(args.repository)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                               key=attrgetter('ts'), reverse=True))
        if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0:
            self.print_error('At least one of the "hourly", "daily", "weekly", "monthly" or "yearly" '
                             'settings must be specified')
            return 1
        if args.prefix:
            archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
        keep = []
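        # Each prune_split() call keeps the most recent archive per hour/day/week/
        # month/year bucket, up to the requested count, skipping archives that an
        # earlier rule already kept.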
        if args.hourly:
            keep += prune_split(archives, '%Y-%m-%d %H', args.hourly)
        if args.daily:
            keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
        if args.weekly:
            keep += prune_split(archives, '%G-%V', args.weekly, keep)
        if args.monthly:
            keep += prune_split(archives, '%Y-%m', args.monthly, keep)
        if args.yearly:
            keep += prune_split(archives, '%Y', args.yearly, keep)
        keep.sort(key=attrgetter('ts'), reverse=True)
        to_delete = [a for a in archives if a not in keep]
        for archive in keep:
            self.print_verbose('Keeping archive "%s"' % archive.name)
        for archive in to_delete:
            self.print_verbose('Pruning archive "%s"', archive.name)
            archive.delete(cache)
        return self.exit_code

    def run(self, args=None):
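        # Make sure the key and cache directories exist and are accessible only
        # by the owner before doing anything else.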
        keys_dir = get_keys_dir()
        if not os.path.exists(keys_dir):
            os.makedirs(keys_dir)
            os.chmod(keys_dir, stat.S_IRWXU)
        cache_dir = get_cache_dir()
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
            os.chmod(cache_dir, stat.S_IRWXU)
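        # Options shared by every subcommand (currently just -v/--verbose) live
        # on a common parent parser.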
        common_parser = argparse.ArgumentParser(add_help=False)
        common_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                                   default=False,
                                   help='verbose output')

        parser = argparse.ArgumentParser(description='Attic - Deduplicated Backups')
        subparsers = parser.add_subparsers(title='Available subcommands')

        subparser = subparsers.add_parser('serve', parents=[common_parser])
        subparser.set_defaults(func=self.do_serve)

        subparser = subparsers.add_parser('init', parents=[common_parser],
                                          description=self.do_init.__doc__)
        subparser.set_defaults(func=self.do_init)
        subparser.add_argument('repository',
                               type=location_validator(archive=False),
                               help='repository to create')
        subparser.add_argument('--key-file', dest='keyfile',
                               action='store_true', default=False,
                               help='enable key file based encryption')
        subparser.add_argument('--passphrase', dest='passphrase',
                               action='store_true', default=False,
                               help='enable passphrase based encryption')

        subparser = subparsers.add_parser('change-passphrase', parents=[common_parser],
                                          description=self.do_change_passphrase.__doc__)
        subparser.set_defaults(func=self.do_change_passphrase)
        subparser.add_argument('repository', type=location_validator(archive=False))

        subparser = subparsers.add_parser('create', parents=[common_parser],
                                          description=self.do_create.__doc__)
        subparser.set_defaults(func=self.do_create)
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the created archive')
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=ExcludePattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
                               type=int, default=300, metavar='SECONDS',
                               help='write checkpoint every SECONDS seconds (Default: 300)')
        subparser.add_argument('--do-not-cross-mountpoints', dest='dontcross',
                               action='store_true', default=False,
                               help='do not cross mount points')
        subparser.add_argument('--numeric-owner', dest='numeric_owner',
                               action='store_true', default=False,
                               help='only store numeric user and group identifiers')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to create')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='paths to archive')

        subparser = subparsers.add_parser('extract', parents=[common_parser],
                                          description=self.do_extract.__doc__)
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=ExcludePattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('--numeric-owner', dest='numeric_owner',
                               action='store_true', default=False,
                               help='only obey numeric user and group identifiers')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to extract')
        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
                               help='paths to extract')

        subparser = subparsers.add_parser('delete', parents=[common_parser],
                                          description=self.do_delete.__doc__)
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to delete')

        subparser = subparsers.add_parser('list', parents=[common_parser],
                                          description=self.do_list.__doc__)
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('src', metavar='SRC', type=location_validator(),
                               help='repository/archive to list contents of')

        subparser = subparsers.add_parser('mount', parents=[common_parser],
                                          description=self.do_mount.__doc__)
        subparser.set_defaults(func=self.do_mount)
        subparser.add_argument('archive', metavar='ARCHIVE', type=location_validator(archive=True),
                               help='archive to mount')
        subparser.add_argument('mountpoint', metavar='MOUNTPOINT', type=str,
                               help='where to mount filesystem')
        subparser.add_argument('-f', '--foreground', dest='foreground',
                               action='store_true', default=False,
                               help='stay in foreground, do not daemonize')

        subparser = subparsers.add_parser('verify', parents=[common_parser],
                                          description=self.do_verify.__doc__)
        subparser.set_defaults(func=self.do_verify)
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=ExcludePattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to verify integrity of')
        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
                               help='paths to verify')

        subparser = subparsers.add_parser('info', parents=[common_parser],
                                          description=self.do_info.__doc__)
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to display information about')

        subparser = subparsers.add_parser('prune', parents=[common_parser],
                                          description=self.do_prune.__doc__)
        subparser.set_defaults(func=self.do_prune)
        subparser.add_argument('-H', '--hourly', dest='hourly', type=int, default=0,
                               help='number of hourly archives to keep')
        subparser.add_argument('-d', '--daily', dest='daily', type=int, default=0,
                               help='number of daily archives to keep')
        subparser.add_argument('-w', '--weekly', dest='weekly', type=int, default=0,
                               help='number of weekly archives to keep')
        subparser.add_argument('-m', '--monthly', dest='monthly', type=int, default=0,
                               help='number of monthly archives to keep')
        subparser.add_argument('-y', '--yearly', dest='yearly', type=int, default=0,
                               help='number of yearly archives to keep')
        subparser.add_argument('-p', '--prefix', dest='prefix', type=str,
                               help='only consider archive names starting with this prefix')
        subparser.add_argument('repository', metavar='REPOSITORY',
                               type=location_validator(archive=False),
                               help='repository to prune')

        args = parser.parse_args(args or ['-h'])
        self.verbose = args.verbose
        return args.func(args)


def main():
    archiver = Archiver()
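    # Translate well-known failures into readable error messages and a non-zero
    # exit status instead of tracebacks.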
    try:
        exit_code = archiver.run(sys.argv[1:])
    except Repository.DoesNotExist:
        archiver.print_error('Error: Repository not found')
        exit_code = 1
    except Repository.AlreadyExists:
        archiver.print_error('Error: Repository already exists')
        exit_code = 1
    except Archive.AlreadyExists as e:
        archiver.print_error('Error: Archive "%s" already exists', e)
        exit_code = 1
    except Archive.DoesNotExist as e:
        archiver.print_error('Error: Archive "%s" does not exist', e)
        exit_code = 1
    except ConnectionClosed:
        archiver.print_error('Connection closed by remote host')
        exit_code = 1
    except KeyboardInterrupt:
        archiver.print_error('Error: Keyboard interrupt')
        exit_code = 1
    else:
        if exit_code:
            archiver.print_error('Exiting with failure status due to previous errors')
    sys.exit(exit_code)


if __name__ == '__main__':
    main()