# archiver.py
  1. import argparse
  2. from datetime import datetime
  3. from operator import attrgetter
  4. import os
  5. import stat
  6. import sys
  7. from .archive import Archive
  8. from .store import Store
  9. from .cache import Cache
  10. from .key import Key
  11. from .helpers import location_validator, format_file_size, format_time,\
  12. format_file_mode, IncludePattern, ExcludePattern, exclude_path, to_localtime, \
  13. get_cache_dir
  14. from .remote import StoreServer, RemoteStore
  15. class Archiver(object):
  16. def __init__(self):
  17. self.exit_code = 0
  18. def open_store(self, location, create=False):
  19. if location.proto == 'ssh':
  20. return RemoteStore(location, create=create)
  21. else:
  22. return Store(location.path, create=create)
  23. def print_error(self, msg, *args):
  24. msg = args and msg % args or msg
  25. if hasattr(sys.stderr, 'encoding'):
  26. msg = msg.encode(sys.stderr.encoding or 'utf-8', 'ignore')
  27. self.exit_code = 1
  28. print >> sys.stderr, msg
  29. def print_verbose(self, msg, *args, **kw):
  30. if self.verbose:
  31. msg = args and msg % args or msg
  32. if hasattr(sys.stdout, 'encoding'):
  33. msg = msg.encode(sys.stdout.encoding or 'utf-8', 'ignore')
  34. if kw.get('newline', True):
  35. print msg
  36. else:
  37. print msg,
  38. def do_serve(self, args):
  39. return StoreServer().serve()
  40. def do_init(self, args):
  41. store = self.open_store(args.store, create=True)
  42. key = Key.create(store, args.store.to_key_filename(),
  43. password=args.password)
  44. return self.exit_code
  45. def do_create(self, args):
  46. store = self.open_store(args.archive)
  47. key = Key(store)
  48. try:
  49. Archive(store, key, args.archive.archive)
  50. except Archive.DoesNotExist:
  51. pass
  52. else:
  53. self.print_error('Archive already exists')
  54. return self.exit_code
  55. cache = Cache(store, key)
  56. archive = Archive(store, key, cache=cache)
  57. # Add darc cache dir to inode_skip list
  58. skip_inodes = set()
  59. try:
  60. st = os.stat(get_cache_dir())
  61. skip_inodes.add((st.st_ino, st.st_dev))
  62. except IOError:
  63. pass
  64. # Add local store dir to inode_skip list
  65. if not args.archive.host:
  66. try:
  67. st = os.stat(args.archive.path)
  68. skip_inodes.add((st.st_ino, st.st_dev))
  69. except IOError:
  70. pass
  71. for path in args.paths:
  72. self._process(archive, cache, args.patterns, skip_inodes, unicode(path))
  73. archive.save(args.archive.archive, cache)
  74. return self.exit_code
  75. def _process(self, archive, cache, patterns, skip_inodes, path):
  76. if exclude_path(path, patterns):
  77. return
  78. try:
  79. st = os.lstat(path)
  80. except OSError, e:
  81. self.print_error('%s: %s', path, e)
  82. return
  83. if (st.st_ino, st.st_dev) in skip_inodes:
  84. return
  85. self.print_verbose(path)
  86. if stat.S_ISDIR(st.st_mode):
  87. archive.process_dir(path, st)
  88. try:
  89. entries = os.listdir(path)
  90. except OSError, e:
  91. self.print_error('%s: %s', path, e)
  92. else:
  93. for filename in sorted(entries):
  94. self._process(archive, cache, patterns, skip_inodes,
  95. os.path.join(path, filename))
  96. elif stat.S_ISLNK(st.st_mode):
  97. archive.process_symlink(path, st)
  98. elif stat.S_ISFIFO(st.st_mode):
  99. archive.process_fifo(path, st)
  100. elif stat.S_ISREG(st.st_mode):
  101. try:
  102. archive.process_file(path, st, cache)
  103. except IOError, e:
  104. self.print_error('%s: %s', path, e)
  105. else:
  106. self.print_error('Unknown file type: %s', path)
  107. def do_extract(self, args):
  108. def start_cb(item):
  109. self.print_verbose(item['path'].decode('utf-8'))
  110. def extract_cb(item):
  111. if exclude_path(item['path'], args.patterns):
  112. return
  113. archive.extract_item(item, args.dest, start_cb)
  114. if stat.S_ISDIR(item['mode']):
  115. dirs.append(item)
  116. if dirs and not item['path'].startswith(dirs[-1]['path']):
  117. # Extract directories twice to make sure mtime is correctly restored
  118. archive.extract_item(dirs.pop(-1), args.dest)
  119. store = self.open_store(args.archive)
  120. key = Key(store)
  121. archive = Archive(store, key, args.archive.archive)
  122. dirs = []
  123. archive.iter_items(extract_cb)
  124. store.flush_rpc()
  125. while dirs:
  126. archive.extract_item(dirs.pop(-1), args.dest)
  127. return self.exit_code
  128. def do_delete(self, args):
  129. store = self.open_store(args.archive)
  130. key = Key(store)
  131. cache = Cache(store, key)
  132. archive = Archive(store, key, args.archive.archive, cache=cache)
  133. archive.delete(cache)
  134. return self.exit_code
  135. def do_list(self, args):
  136. def callback(item):
  137. type = tmap.get(item['mode'] / 4096, '?')
  138. mode = format_file_mode(item['mode'])
  139. size = 0
  140. if type == '-':
  141. try:
  142. size = sum(size for _, size, _ in item['chunks'])
  143. except KeyError:
  144. pass
  145. mtime = format_time(datetime.fromtimestamp(item['mtime']))
  146. if 'source' in item:
  147. if type == 'l':
  148. extra = ' -> %s' % item['source']
  149. else:
  150. type = 'h'
  151. extra = ' link to %s' % item['source']
  152. else:
  153. extra = ''
  154. print '%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item['user'],
  155. item['group'], size, mtime,
  156. item['path'], extra)
  157. store = self.open_store(args.src)
  158. key = Key(store)
  159. if args.src.archive:
  160. tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 010: '-', 012: 'l', 014: 's'}
  161. archive = Archive(store, key, args.src.archive)
  162. archive.iter_items(callback)
  163. store.flush_rpc()
  164. else:
  165. for archive in sorted(Archive.list_archives(store, key), key=attrgetter('ts')):
  166. print '%-20s %s' % (archive.metadata['name'], to_localtime(archive.ts).strftime('%c'))
  167. return self.exit_code
  168. def do_verify(self, args):
  169. store = self.open_store(args.archive)
  170. key = Key(store)
  171. archive = Archive(store, key, args.archive.archive)
  172. def start_cb(item):
  173. self.print_verbose('%s ...', item['path'].decode('utf-8'), newline=False)
  174. def result_cb(item, success):
  175. if success:
  176. self.print_verbose('OK')
  177. else:
  178. self.print_verbose('ERROR')
  179. self.print_error('%s: verification failed' % item['path'])
  180. def callback(item):
  181. if exclude_path(item['path'], args.patterns):
  182. return
  183. if stat.S_ISREG(item['mode']) and 'chunks' in item:
  184. archive.verify_file(item, start_cb, result_cb)
  185. archive.iter_items(callback)
  186. store.flush_rpc()
  187. return self.exit_code
  188. def do_info(self, args):
  189. store = self.open_store(args.archive)
  190. key = Key(store)
  191. cache = Cache(store, key)
  192. archive = Archive(store, key, args.archive.archive, cache=cache)
  193. stats = archive.stats(cache)
  194. print 'Name:', archive.metadata['name']
  195. print 'Hostname:', archive.metadata['hostname']
  196. print 'Username:', archive.metadata['username']
  197. print 'Time:', archive.metadata['time']
  198. print 'Command line:', ' '.join(archive.metadata['cmdline'])
  199. print 'Original size:', format_file_size(stats['osize'])
  200. print 'Compressed size:', format_file_size(stats['csize'])
  201. print 'Unique data:', format_file_size(stats['usize'])
  202. return self.exit_code
  203. def run(self, args=None):
  204. dot_path = os.path.join(os.path.expanduser('~'), '.darc')
  205. if not os.path.exists(dot_path):
  206. os.mkdir(dot_path)
  207. os.mkdir(os.path.join(dot_path, 'keys'))
  208. os.mkdir(os.path.join(dot_path, 'cache'))
  209. parser = argparse.ArgumentParser(description='DARC - Deduplicating Archiver')
  210. parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
  211. default=False,
  212. help='Verbose output')
  213. subparsers = parser.add_subparsers(title='Available subcommands')
  214. subparser = subparsers.add_parser('serve')
  215. subparser.set_defaults(func=self.do_serve)
  216. subparser = subparsers.add_parser('init')
  217. subparser.set_defaults(func=self.do_init)
  218. subparser.add_argument('-p', '--password', dest='password',
  219. help='Protect store key with password (Default: prompt)')
  220. subparser.add_argument('store',
  221. type=location_validator(archive=False),
  222. help='Store to create')
  223. subparser = subparsers.add_parser('create')
  224. subparser.set_defaults(func=self.do_create)
  225. subparser.add_argument('-i', '--include', dest='patterns',
  226. type=IncludePattern, action='append',
  227. help='Include condition')
  228. subparser.add_argument('-e', '--exclude', dest='patterns',
  229. type=ExcludePattern, action='append',
  230. help='Include condition')
  231. subparser.add_argument('archive', metavar='ARCHIVE',
  232. type=location_validator(archive=True),
  233. help='Archive to create')
  234. subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
  235. help='Paths to add to archive')
  236. subparser = subparsers.add_parser('extract')
  237. subparser.set_defaults(func=self.do_extract)
  238. subparser.add_argument('-i', '--include', dest='patterns',
  239. type=IncludePattern, action='append',
  240. help='Include condition')
  241. subparser.add_argument('-e', '--exclude', dest='patterns',
  242. type=ExcludePattern, action='append',
  243. help='Include condition')
  244. subparser.add_argument('archive', metavar='ARCHIVE',
  245. type=location_validator(archive=True),
  246. help='Archive to create')
  247. subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
  248. help='Where to extract files')
  249. subparser = subparsers.add_parser('delete')
  250. subparser.set_defaults(func=self.do_delete)
  251. subparser.add_argument('archive', metavar='ARCHIVE',
  252. type=location_validator(archive=True),
  253. help='Archive to delete')
  254. subparser = subparsers.add_parser('list')
  255. subparser.set_defaults(func=self.do_list)
  256. subparser.add_argument('src', metavar='SRC', type=location_validator(),
  257. help='Store/Archive to list contents of')
  258. subparser= subparsers.add_parser('verify')
  259. subparser.set_defaults(func=self.do_verify)
  260. subparser.add_argument('-i', '--include', dest='patterns',
  261. type=IncludePattern, action='append',
  262. help='Include condition')
  263. subparser.add_argument('-e', '--exclude', dest='patterns',
  264. type=ExcludePattern, action='append',
  265. help='Include condition')
  266. subparser.add_argument('archive', metavar='ARCHIVE',
  267. type=location_validator(archive=True),
  268. help='Archive to verity integrity of')
  269. subparser= subparsers.add_parser('info')
  270. subparser.set_defaults(func=self.do_info)
  271. subparser.add_argument('archive', metavar='ARCHIVE',
  272. type=location_validator(archive=True),
  273. help='Archive to display information about')
  274. args = parser.parse_args(args)
  275. self.verbose = args.verbose
  276. return args.func(args)
  277. def main():
  278. archiver = Archiver()
  279. sys.exit(archiver.run())
  280. if __name__ == '__main__':
  281. main()