helpers.py

from __future__ import with_statement
import argparse
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
from operator import attrgetter
import grp
import msgpack
import os
import pwd
import re
import stat
import sys
import time
import urllib

class Manifest(object):

    MANIFEST_ID = '\0' * 32

    def __init__(self, store, key, dont_load=False):
        self.store = store
        self.key = key
        self.archives = {}
        self.config = {}
        if not dont_load:
            self.load()

    def load(self):
        data = self.key.decrypt(None, self.store.get(self.MANIFEST_ID))
        self.id = self.key.id_hash(data)
        manifest = msgpack.unpackb(data)
        if manifest.get('version') != 1:
            raise ValueError('Invalid manifest version')
        self.archives = manifest['archives']
        self.config = manifest['config']
        self.key.post_manifest_load(self.config)

    def write(self):
        self.key.pre_manifest_write(self)
        data = msgpack.packb({
            'version': 1,
            'archives': self.archives,
            'config': self.config,
        })
        self.id = self.key.id_hash(data)
        self.store.put(self.MANIFEST_ID, self.key.encrypt(data))

def prune_split(archives, pattern, n, skip=[]):
    items = {}
    keep = []
    for a in archives:
        key = to_localtime(a.ts).strftime(pattern)
        items.setdefault(key, [])
        items[key].append(a)
    for key, values in sorted(items.items(), reverse=True):
        if n and values[0] not in skip:
            values.sort(key=attrgetter('ts'), reverse=True)
            keep.append(values[0])
            n -= 1
    return keep
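
# Hedged usage sketch (not part of this file): prune_split() is meant to be
# called once per retention rule, passing the already-kept archives via `skip`
# so they are not counted twice.  The variable names and counts below are
# hypothetical; only archive objects carrying a UTC `ts` datetime are assumed.
#
#   daily = prune_split(archives, '%Y-%m-%d', 7)            # newest per day, 7 days
#   weekly = prune_split(archives, '%Y-%W', 4, skip=daily)  # newest per week, 4 weeks
#   keep = set(daily) | set(weekly)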

class Statistics(object):

    def __init__(self):
        self.osize = self.csize = self.usize = self.nfiles = 0

    def update(self, size, csize, unique):
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    def print_(self):
        print 'Number of files: %d' % self.nfiles
        print 'Original size: %d (%s)' % (self.osize, format_file_size(self.osize))
        print 'Compressed size: %d (%s)' % (self.csize, format_file_size(self.csize))
        print 'Unique data: %d (%s)' % (self.usize, format_file_size(self.usize))

# OS X filenames are UTF-8 only, so any non-UTF-8 filename is URL encoded
if sys.platform == 'darwin':
    def encode_filename(name):
        try:
            return name.decode('utf-8')
        except UnicodeDecodeError:
            return urllib.quote(name)
else:
    encode_filename = str

def get_keys_dir():
    """Determine where to store keys"""
    return os.environ.get('DARC_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'keys'))


def get_cache_dir():
    """Determine where to store the cache"""
    return os.environ.get('DARC_CACHE_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'cache'))
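
# Hedged example: both directories live under ~/.darc/ by default and can be
# redirected with the DARC_KEYS_DIR / DARC_CACHE_DIR environment variables
# (the value below is made up):
#
#   os.environ['DARC_CACHE_DIR'] = '/var/cache/darc'
#   print get_cache_dir()    # prints /var/cache/darc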

def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return ts - timedelta(seconds=time.altzone)

def adjust_patterns(patterns):
    # ExcludePattern is a subclass of IncludePattern, so it must be checked first
    if patterns and isinstance(patterns[-1], ExcludePattern):
        patterns.append(IncludePattern('*'))
    elif patterns and isinstance(patterns[-1], IncludePattern):
        patterns.append(ExcludePattern('*'))


def exclude_path(path, patterns):
    """Used by create and extract sub-commands to determine
    if an item should be processed or not
    """
    for pattern in (patterns or []):
        if pattern.match(path):
            return isinstance(pattern, ExcludePattern)
    return False
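
# Hedged illustration of the pattern semantics (paths are hypothetical).  The
# first pattern that matches decides: exclude_path() returns True only when
# that pattern is an ExcludePattern, and False when nothing matches.
#
#   >>> patterns = [ExcludePattern('/home/user/junk'), IncludePattern('/home/user')]
#   >>> exclude_path('/home/user/junk/tmp.o', patterns)
#   True
#   >>> exclude_path('/home/user/doc.txt', patterns)
#   False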

class IncludePattern(object):
    """--include PATTERN

    >>> py = IncludePattern('*.py')
    >>> foo = IncludePattern('/foo')
    >>> py.match('/foo/foo.py')
    True
    >>> py.match('/bar/foo.java')
    False
    >>> foo.match('/foo/foo.py')
    True
    >>> foo.match('/bar/foo.java')
    False
    >>> foo.match('/foobar/foo.py')
    False
    >>> foo.match('/foo')
    True
    """
    def __init__(self, pattern):
        self.pattern = self.dirpattern = pattern
        if not pattern.endswith(os.path.sep):
            self.dirpattern += os.path.sep

    def match(self, path):
        dir, name = os.path.split(path)
        return (path == self.pattern
                or (dir + os.path.sep).startswith(self.dirpattern)
                or fnmatchcase(name, self.pattern))

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)


class ExcludePattern(IncludePattern):
    """--exclude PATTERN
    """

def walk_path(path, skip_inodes=None):
    st = os.lstat(path)
    if skip_inodes and (st.st_ino, st.st_dev) in skip_inodes:
        return
    yield path, st
    if stat.S_ISDIR(st.st_mode):
        for f in os.listdir(path):
            for x in walk_path(os.path.join(path, f), skip_inodes):
                yield x
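
# Hedged usage sketch: walk_path() yields (path, stat_result) pairs in pre-order,
# starting with the root itself; `skip_inodes` can hold (st_ino, st_dev) tuples
# of entries that must not be visited.  The path below is hypothetical:
#
#   for p, st in walk_path('/etc'):
#       if stat.S_ISREG(st.st_mode):
#           print p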

def format_time(t):
    """Format datetime suitable for fixed length list output
    """
    if (datetime.now() - t).days < 365:
        return t.strftime('%b %d %H:%M')
    else:
        return t.strftime('%b %d %Y')

def format_timedelta(td):
    """Format timedelta in a human friendly format

    >>> from datetime import datetime
    >>> t0 = datetime(2001, 1, 1, 10, 20, 3, 0)
    >>> t1 = datetime(2001, 1, 1, 12, 20, 4, 100000)
    >>> format_timedelta(t1 - t0)
    '2 hours 1.10 seconds'
    """
    # Since td.total_seconds() requires python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt

def format_file_mode(mod):
    """Format file mode bits for list output
    """
    def x(v):
        return ''.join(v & m and s or '-'
                       for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
    return '%s%s%s' % (x(mod / 64), x(mod / 8), x(mod))
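
# Hedged example of the output format (octal mode 0755, i.e. owner rwx,
# group and others r-x; special bits such as setuid are not rendered):
#
#   >>> format_file_mode(0755)
#   'rwxr-xr-x'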

def format_file_size(v):
    """Format file size into a human friendly format
    """
    if v > 1024 * 1024 * 1024:
        return '%.2f GB' % (v / 1024. / 1024. / 1024.)
    elif v > 1024 * 1024:
        return '%.2f MB' % (v / 1024. / 1024.)
    elif v > 1024:
        return '%.2f kB' % (v / 1024.)
    else:
        return '%d B' % v
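
# Hedged examples of the thresholds above (binary multiples, decimal-style labels):
#
#   >>> format_file_size(512)
#   '512 B'
#   >>> format_file_size(2048)
#   '2.00 kB'
#   >>> format_file_size(3 * 1024 * 1024)
#   '3.00 MB'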

class IntegrityError(Exception):
    """Exception raised when stored data fails an integrity check
    """

def memoize(function):
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function
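
# Hedged note: memoize() caches results keyed on the positional-argument tuple,
# which is why the uid/gid lookups below hit the password/group database only
# once per id.  Hypothetical usage on an unrelated function:
#
#   @memoize
#   def square(x):
#       return x * x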

@memoize
def uid2user(uid):
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return None


@memoize
def user2uid(user):
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return None


@memoize
def gid2group(gid):
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return None


@memoize
def group2gid(group):
    try:
        return group and grp.getgrnam(group).gr_gid
    except KeyError:
        return None

class Location(object):
    """Object representing a store / archive location

    >>> Location('ssh://user@host:1234/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive='archive')
    >>> Location('file:///some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    >>> Location('user@host:/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=22, path='/some/path', archive='archive')
    >>> Location('/some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    """
    proto = user = host = port = path = archive = None
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')

    def __init__(self, text):
        self.orig = text
        if not self.parse(text):
            raise ValueError

    def parse(self, text):
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or 22
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = m.group('path')
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            if self.proto == 'ssh':
                self.port = 22
            return True
        return False

    def __str__(self):
        items = []
        items.append('proto=%r' % self.proto)
        items.append('user=%r' % self.user)
        items.append('host=%r' % self.host)
        items.append('port=%r' % self.port)
        items.append('path=%r' % self.path)
        items.append('archive=%r' % self.archive)
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub('[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self
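
# Hedged example of how a location maps to a key file name (the location is
# hypothetical): non-word characters in the path become underscores, and remote
# locations are prefixed with the host name.
#
#   Location('user@host:/some/path').to_key_filename()
#   # -> os.path.join(get_keys_dir(), 'host__some_path')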

def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive can be specified' % text)
        return loc
    return validator
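
# Hedged sketch of how location_validator() plugs into argparse (the argument
# name and the example location are hypothetical):
#
#   parser = argparse.ArgumentParser()
#   parser.add_argument('archive', type=location_validator(archive=True))
#   args = parser.parse_args(['user@host:/some/path::backup-1'])
#   print args.archive.host, args.archive.archive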

def read_msgpack(filename):
    with open(filename, 'rb') as fd:
        return msgpack.unpack(fd)


def write_msgpack(filename, d):
    with open(filename + '.tmp', 'wb') as fd:
        msgpack.pack(d, fd)
        fd.flush()
        os.fsync(fd)
    os.rename(filename + '.tmp', filename)
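
# Hedged note on the write pattern above: packing into "<filename>.tmp",
# syncing, and then renaming makes the update atomic on POSIX filesystems, so a
# reader sees either the old or the new file but never a half-written one.
# Round trip (hypothetical path):
#
#   write_msgpack('/tmp/example.cache', {'version': 1})
#   assert read_msgpack('/tmp/example.cache') == {'version': 1}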