archiver.py
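# darc command-line front end: the Archiver class maps each subcommand of the
# DARC deduplicating archiver to a do_* handler, and run() builds the argparse
# parser that dispatches to them.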

import argparse
from datetime import datetime
import os
import stat
import sys

from .archive import Archive
from .store import Store
from .cache import Cache
from .keychain import Keychain
from .helpers import location_validator, format_file_size, format_time,\
    format_file_mode, IncludePattern, ExcludePattern, exclude_path
from .remote import StoreServer, RemoteStore


class Archiver(object):

    def __init__(self):
        self.exit_code = 0
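
    # ssh:// locations are presumably served by the 'serve' subcommand on the
    # remote side and accessed through RemoteStore; anything else is opened as
    # a plain local Store.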
    def open_store(self, location, create=False):
        if location.proto == 'ssh':
            return RemoteStore(location, create=create)
        else:
            return Store(location.path, create=create)

    def print_error(self, msg, *args):
        msg = args and msg % args or msg
        if hasattr(sys.stderr, 'encoding'):
            msg = msg.encode(sys.stderr.encoding, 'ignore')
        self.exit_code = 1
        print >> sys.stderr, msg

    def print_verbose(self, msg, *args, **kw):
        if self.verbose:
            msg = args and msg % args or msg
            if hasattr(sys.stdout, 'encoding'):
                msg = msg.encode(sys.stdout.encoding, 'ignore')
            if kw.get('newline', True):
                print msg
            else:
                print msg,
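
    # Each do_* method below implements one CLI subcommand. run() binds them
    # via set_defaults(func=...) and main() uses their return value as the
    # process exit code.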
    def do_init(self, args):
        self.open_store(args.store, create=True)
        return self.exit_code

    def do_serve(self, args):
        return StoreServer().serve()

    def do_create(self, args):
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        try:
            Archive(store, keychain, args.archive.archive)
        except Archive.DoesNotExist:
            pass
        else:
            self.print_error('Archive already exists')
            return self.exit_code
        archive = Archive(store, keychain)
        cache = Cache(store, keychain)
        # Add darc cache dir to inode_skip list
        skip_inodes = []
        try:
            st = os.stat(Cache.cache_dir_path())
            skip_inodes.append((st.st_ino, st.st_dev))
        except OSError:
            pass
        # Add local store dir to inode_skip list
        if not args.archive.host:
            try:
                st = os.stat(args.archive.path)
                skip_inodes.append((st.st_ino, st.st_dev))
            except OSError:
                pass
        for path in args.paths:
            self._process(archive, cache, args.patterns, unicode(path))
        archive.save(args.archive.archive, cache)
        return self.exit_code
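
    # Recursively archive `path`: directories are walked depth-first and every
    # entry is dispatched to the matching Archive.process_* handler based on
    # its lstat() type. Errors are reported but do not abort the run.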
    def _process(self, archive, cache, patterns, path):
        if exclude_path(path, patterns):
            return
        try:
            st = os.lstat(path)
        except OSError, e:
            self.print_error('%s: %s', path, e)
            return
        self.print_verbose(path)
        if stat.S_ISDIR(st.st_mode):
            archive.process_dir(path, st)
            try:
                entries = os.listdir(path)
            except OSError, e:
                self.print_error('%s: %s', path, e)
            else:
                for filename in entries:
                    self._process(archive, cache, patterns,
                                  os.path.join(path, filename))
        elif stat.S_ISLNK(st.st_mode):
            archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            archive.process_fifo(path, st)
        elif stat.S_ISREG(st.st_mode):
            try:
                archive.process_file(path, st, cache)
            except IOError, e:
                self.print_error('%s: %s', path, e)
        else:
            self.print_error('Unknown file type: %s', path)
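
    # Extraction keeps a stack of directory items and extracts each directory a
    # second time once all of its contents have been restored, so that creating
    # files inside it does not clobber the directory's restored mtime.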
    def do_extract(self, args):
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        archive = Archive(store, keychain, args.archive.archive)
        dirs = []
        for item in archive.get_items():
            if exclude_path(item['path'], args.patterns):
                continue
            self.print_verbose(item['path'].decode('utf-8'))
            archive.extract_item(item, args.dest)
            if stat.S_ISDIR(item['mode']):
                dirs.append(item)
            if dirs and not item['path'].startswith(dirs[-1]['path']):
                # Extract directories twice to make sure mtime is correctly restored
                archive.extract_item(dirs.pop(-1), args.dest)
        while dirs:
            archive.extract_item(dirs.pop(-1), args.dest)
        return self.exit_code

    def do_delete(self, args):
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        archive = Archive(store, keychain, args.archive.archive)
        cache = Cache(store, keychain)
        archive.delete(cache)
        return self.exit_code

    def do_list(self, args):
        store = self.open_store(args.src)
        keychain = Keychain(args.keychain)
        if args.src.archive:
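            # Map the file-type bits of st_mode (mode / 4096 == mode >> 12) to
            # ls-style type characters; the octal keys 010, 012 and 014 are
            # S_IFREG, S_IFLNK and S_IFSOCK shifted down by 12 bits.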
            tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 010: '-', 012: 'l', 014: 's'}
            archive = Archive(store, keychain, args.src.archive)
            for item in archive.get_items():
                type = tmap.get(item['mode'] / 4096, '?')
                mode = format_file_mode(item['mode'])
                size = item.get('size', 0)
                mtime = format_time(datetime.fromtimestamp(item['mtime']))
                print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
                                                    item['group'], size, mtime, item['path'])
        else:
            for archive in Archive.list_archives(store, keychain):
                print '%(name)-20s %(time)s' % archive.metadata
        return self.exit_code
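
    # Integrity check: archive.verify_file() is run for every regular-file
    # item; items carrying a 'source' reference are skipped.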
    def do_verify(self, args):
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        archive = Archive(store, keychain, args.archive.archive)
        for item in archive.get_items():
            if stat.S_ISREG(item['mode']) and not 'source' in item:
                self.print_verbose('%s ...', item['path'].decode('utf-8'), newline=False)
                if archive.verify_file(item):
                    self.print_verbose('OK')
                else:
                    self.print_verbose('ERROR')
                    self.print_error('%s: verification failed' % item['path'])
        return self.exit_code

    def do_info(self, args):
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        archive = Archive(store, keychain, args.archive.archive)
        cache = Cache(store, keychain)
        osize, csize, usize = archive.stats(cache)
        print 'Name:', archive.metadata['name']
        print 'Hostname:', archive.metadata['hostname']
        print 'Username:', archive.metadata['username']
        print 'Time:', archive.metadata['time']
        print 'Command line:', ' '.join(archive.metadata['cmdline'])
        print 'Original size:', format_file_size(osize)
        print 'Compressed size:', format_file_size(csize)
        print 'Unique data:', format_file_size(usize)
        return self.exit_code

    def do_init_keychain(self, args):
        return Keychain.generate(args.keychain)

    def do_export_restricted(self, args):
        keychain = Keychain(args.keychain)
        keychain.restrict(args.output)
        return self.exit_code

    def do_keychain_chpass(self, args):
        return Keychain(args.keychain).chpass()
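
    # Build the argparse command-line interface. Every subparser binds its
    # handler with set_defaults(func=...); run() dispatches to it after
    # parsing.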
    def run(self, args=None):
        default_keychain = os.path.join(os.path.expanduser('~'),
                                        '.darc', 'keychain')
        parser = argparse.ArgumentParser(description='DARC - Deduplicating Archiver')
        parser.add_argument('-k', '--keychain', dest='keychain', type=str,
                            default=default_keychain,
                            help='Keychain to use')
        parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                            default=False,
                            help='Verbose output')
        subparsers = parser.add_subparsers(title='Available subcommands')

        subparser = subparsers.add_parser('init-keychain')
        subparser.set_defaults(func=self.do_init_keychain)

        subparser = subparsers.add_parser('export-restricted')
        subparser.add_argument('output', metavar='OUTPUT', type=str,
                               help='Keychain to create')
        subparser.set_defaults(func=self.do_export_restricted)

        subparser = subparsers.add_parser('change-password')
        subparser.set_defaults(func=self.do_keychain_chpass)

        subparser = subparsers.add_parser('init')
        subparser.set_defaults(func=self.do_init)
        subparser.add_argument('store', metavar='STORE',
                               type=location_validator(archive=False),
                               help='Store to initialize')

        subparser = subparsers.add_parser('serve')
        subparser.set_defaults(func=self.do_serve)

        subparser = subparsers.add_parser('create')
        subparser.set_defaults(func=self.do_create)
        subparser.add_argument('-i', '--include', dest='patterns',
                               type=IncludePattern, action='append',
                               help='Include condition')
        subparser.add_argument('-e', '--exclude', dest='patterns',
                               type=ExcludePattern, action='append',
                               help='Exclude condition')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to create')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='Paths to add to archive')

        subparser = subparsers.add_parser('extract')
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('-i', '--include', dest='patterns',
                               type=IncludePattern, action='append',
                               help='Include condition')
        subparser.add_argument('-e', '--exclude', dest='patterns',
                               type=ExcludePattern, action='append',
                               help='Exclude condition')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to extract')
        subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
                               help='Where to extract files')

        subparser = subparsers.add_parser('delete')
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to delete')

        subparser = subparsers.add_parser('list')
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('src', metavar='SRC', type=location_validator(),
                               help='Store/Archive to list contents of')

        subparser = subparsers.add_parser('verify')
        subparser.set_defaults(func=self.do_verify)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to verify integrity of')

        subparser = subparsers.add_parser('info')
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to display information about')

        args = parser.parse_args(args)
        self.verbose = args.verbose
        return args.func(args)
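

# main() hands the selected handler's return value straight to sys.exit(), so
# the exit_code set by print_error() becomes the process exit status.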
def main():
    archiver = Archiver()
    sys.exit(archiver.run())

if __name__ == '__main__':
    main()