# helpers.py

from __future__ import with_statement
import argparse
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
from operator import attrgetter
import grp
import os
import pwd
import re
import stat
import struct
import sys
import time
import urllib


def purge_split(archives, pattern, n, reverse=False):
    """Split archives into buckets keyed by strftime(pattern) of their local
    timestamp. One archive is kept from each of the first n buckets (in sort
    order); every other archive is marked for deletion. Returns (keep, delete).
    """
    items = {}
    keep = []
    delete = []
    for a in archives:
        key = to_localtime(a.ts).strftime(pattern)
        items.setdefault(key, [])
        items[key].append(a)
    for key, values in sorted(items.items(), reverse=reverse):
        if n:
            values.sort(key=attrgetter('ts'), reverse=reverse)
            keep.append(values[0])
            delete += values[1:]
            n -= 1
        else:
            delete += values
    return keep, delete
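
# Illustrative usage sketch (not part of the original module). Assuming
# archive objects carry a UTC ``ts`` datetime attribute as used above, this
# keeps the newest archive of each of the six most recent months and marks
# everything else for deletion:
#
#     keep, delete = purge_split(archives, '%Y-%m', 6, reverse=True)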


class Statistics(object):

    def __init__(self):
        self.osize = self.csize = self.usize = self.nfiles = 0

    def update(self, size, csize, unique):
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    def print_(self):
        print 'Number of files: %d' % self.nfiles
        print 'Original size: %d (%s)' % (self.osize, format_file_size(self.osize))
        print 'Compressed size: %d (%s)' % (self.csize, format_file_size(self.csize))
        print 'Unique data: %d (%s)' % (self.usize, format_file_size(self.usize))
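
# Illustrative usage sketch (not part of the original module): accumulate
# per-chunk numbers while creating an archive, then print a summary.
#
#     stats = Statistics()
#     stats.nfiles += 1
#     stats.update(size=4096, csize=1024, unique=True)
#     stats.print_()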


# OS X filenames are UTF-8 only, so any non-UTF-8 filename is URL encoded
if sys.platform == 'darwin':
    def encode_filename(name):
        try:
            name.decode('utf-8')
            return name
        except UnicodeDecodeError:
            return urllib.quote(name)
else:
    encode_filename = str


class Counter(object):

    __slots__ = ('v',)

    def __init__(self, value=0):
        self.v = value

    def inc(self, amount=1):
        self.v += amount

    def dec(self, amount=1):
        self.v -= amount

    def __cmp__(self, x):
        return cmp(self.v, x)

    def __repr__(self):
        return '<Counter(%r)>' % self.v


def get_keys_dir():
    """Determine where to store keys"""
    return os.environ.get('DARC_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'keys'))


def get_cache_dir():
    """Determine where to store the cache"""
    return os.environ.get('DARC_CACHE_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'cache'))


def deferrable(f):
    def wrapper(*args, **kw):
        callback = kw.pop('callback', None)
        if callback:
            data = kw.pop('callback_data', None)
            try:
                res = f(*args, **kw)
            except Exception, e:
                callback(None, e, data)
            else:
                callback(res, None, data)
        else:
            return f(*args, **kw)
    return wrapper


def error_callback(res, error, data):
    if error:
        raise error
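
# Illustrative usage sketch (not part of the original module). A function
# decorated with ``deferrable`` can either be called normally or be given a
# ``callback`` keyword, in which case the result or exception is delivered to
# ``callback(res, error, callback_data)`` instead of being returned. The
# names ``fetch`` and ``lookup`` below are hypothetical:
#
#     @deferrable
#     def fetch(key):
#         return lookup(key)
#
#     value = fetch('x')                                       # direct call
#     fetch('x', callback=error_callback, callback_data=None)  # deferred call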


def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return ts - timedelta(seconds=time.altzone)


def read_set(path):
    """Read set from disk (as int32s)
    """
    with open(path, 'rb') as fd:
        data = fd.read()
        return set(struct.unpack('<%di' % (len(data) / 4), data))


def write_set(s, path):
    """Write set to disk (as int32s)
    """
    with open(path, 'wb') as fd:
        fd.write(struct.pack('<%di' % len(s), *s))
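
# Illustrative round trip (not part of the original module); elements are
# stored as little-endian signed 32 bit integers, so ordering is not
# preserved. The path below is hypothetical:
#
#     write_set(set([1, 2, 3]), '/tmp/example.set')
#     assert read_set('/tmp/example.set') == set([1, 2, 3])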


def encode_long(v):
    bytes = []
    while True:
        if v > 0x7f:
            bytes.append(0x80 | (v % 0x80))
            v >>= 7
        else:
            bytes.append(v)
            return ''.join(chr(x) for x in bytes)


def decode_long(bytes):
    v = 0
    base = 0
    for x in bytes:
        b = ord(x)
        if b & 0x80:
            v += (b & 0x7f) << base
            base += 7
        else:
            return v + (b << base)
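
# Illustrative round trip (not part of the original module): values are
# encoded as little-endian base-128 varints, 7 data bits per byte with the
# high bit as a continuation flag:
#
#     assert encode_long(300) == '\xac\x02'
#     assert decode_long(encode_long(300)) == 300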


def exclude_path(path, patterns):
    """Used by create and extract sub-commands to determine
    if an item should be processed or not
    """
    for pattern in (patterns or []):
        if pattern.match(path):
            return isinstance(pattern, ExcludePattern)
    return False


class IncludePattern(object):
    """--include PATTERN

    >>> py = IncludePattern('*.py')
    >>> foo = IncludePattern('/foo')
    >>> py.match('/foo/foo.py')
    True
    >>> py.match('/bar/foo.java')
    False
    >>> foo.match('/foo/foo.py')
    True
    >>> foo.match('/bar/foo.java')
    False
    >>> foo.match('/foobar/foo.py')
    False
    >>> foo.match('/foo')
    True
    """
    def __init__(self, pattern):
        self.pattern = self.dirpattern = pattern
        if not pattern.endswith(os.path.sep):
            self.dirpattern += os.path.sep

    def match(self, path):
        dir, name = os.path.split(path)
        return (path == self.pattern
                or (dir + os.path.sep).startswith(self.dirpattern)
                or fnmatchcase(name, self.pattern))

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)


class ExcludePattern(IncludePattern):
    """--exclude PATTERN

    Same matching rules as IncludePattern, but a match tells exclude_path()
    to skip the item instead of processing it.
    """
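
# Illustrative usage sketch (not part of the original module): patterns are
# tried in order and the first match decides, with ExcludePattern matches
# meaning "skip the item":
#
#     patterns = [ExcludePattern('*.pyc'), IncludePattern('/home')]
#     exclude_path('/home/user/cache.pyc', patterns)   # -> True (skip)
#     exclude_path('/home/user/notes.txt', patterns)   # -> False (process)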


def walk_path(path, skip_inodes=None):
    """Recursively yield (path, lstat result) pairs for path and everything
    below it, depth first, skipping entries whose (st_ino, st_dev) pair is
    listed in skip_inodes.
    """
    st = os.lstat(path)
    if skip_inodes and (st.st_ino, st.st_dev) in skip_inodes:
        return
    yield path, st
    if stat.S_ISDIR(st.st_mode):
        for f in os.listdir(path):
            for x in walk_path(os.path.join(path, f), skip_inodes):
                yield x
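
# Illustrative usage sketch (not part of the original module), e.g. to avoid
# descending into darc's own cache directory while walking a tree:
#
#     st = os.lstat(get_cache_dir())
#     skip = set([(st.st_ino, st.st_dev)])
#     for path, st in walk_path('/home', skip_inodes=skip):
#         print path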


def format_time(t):
    """Format datetime suitable for fixed length list output
    """
    if (datetime.now() - t).days < 365:
        return t.strftime('%b %d %H:%M')
    else:
        return t.strftime('%b %d %Y')


def format_timedelta(td):
    """Format timedelta in a human friendly format

    >>> from datetime import datetime
    >>> t0 = datetime(2001, 1, 1, 10, 20, 3, 0)
    >>> t1 = datetime(2001, 1, 1, 12, 20, 4, 100000)
    >>> format_timedelta(t1 - t0)
    '2 hours 1.10 seconds'
    """
    # Not using td.total_seconds() since it requires Python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / float(10**6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt


def format_file_mode(mod):
    """Format file mode bits for list output
    """
    def x(v):
        return ''.join(v & m and s or '-'
                       for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
    return '%s%s%s' % (x(mod / 64), x(mod / 8), x(mod))


def format_file_size(v):
    """Format file size into a human friendly format
    """
    if v > 1024 * 1024 * 1024:
        return '%.2f GB' % (v / 1024. / 1024. / 1024.)
    elif v > 1024 * 1024:
        return '%.2f MB' % (v / 1024. / 1024.)
    elif v > 1024:
        return '%.2f kB' % (v / 1024.)
    else:
        return str(v)
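
# Illustrative values (not part of the original module): sizes scale by
# powers of 1024 and anything up to 1 kB is printed as a plain byte count.
#
#     format_file_size(512)          # -> '512'
#     format_file_size(10 * 1024)    # -> '10.00 kB'
#     format_file_size(5 * 2 ** 20)  # -> '5.00 MB'
#     format_file_mode(0755)         # -> 'rwxr-xr-x'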


class IntegrityError(Exception):
    """Exception raised when an integrity check fails"""


def memoize(function):
    """Cache the results of function, keyed by its positional arguments"""
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function
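
# Illustrative usage sketch (not part of the original module): memoize keeps
# one result per positional-argument tuple, so the repeated passwd/group
# lookups below hit the in-memory cache instead of the system databases.
# ``square`` is a hypothetical example function:
#
#     @memoize
#     def square(x):
#         return x * x
#
#     square(3); square(3)   # the second call is answered from the cache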


@memoize
def uid2user(uid):
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return None


@memoize
def user2uid(user):
    try:
        return pwd.getpwnam(user).pw_uid
    except KeyError:
        return None


@memoize
def gid2group(gid):
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return None


@memoize
def group2gid(group):
    try:
        return grp.getgrnam(group).gr_gid
    except KeyError:
        return None


class Location(object):
    """Object representing a store / archive location

    >>> Location('ssh://user@host:1234/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive='archive')
    >>> Location('file:///some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    >>> Location('user@host:/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=22, path='/some/path', archive='archive')
    >>> Location('/some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    """
    proto = user = host = port = path = archive = None
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')

    def __init__(self, text):
        if not self.parse(text):
            raise ValueError

    def parse(self, text):
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or 22
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = m.group('path')
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            if self.proto == 'ssh':
                self.port = 22
            return True
        return False

    def __str__(self):
        items = []
        items.append('proto=%r' % self.proto)
        items.append('user=%r' % self.user)
        items.append('host=%r' % self.host)
        items.append('port=%r' % self.port)
        items.append('path=%r' % self.path)
        items.append('archive=%r' % self.archive)
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub(r'[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self


def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive can be specified' % text)
        return loc
    return validator
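
# Illustrative argparse wiring (not part of the original module): the
# returned validator is meant to be used as an argparse ``type`` so that
# "store::archive" strings are parsed and checked while arguments are read.
# The parser and argument name below are hypothetical:
#
#     parser = argparse.ArgumentParser()
#     parser.add_argument('archive', type=location_validator(archive=True))
#     args = parser.parse_args(['/some/path::backup-1'])
#     args.archive.path, args.archive.archive   # -> ('/some/path', 'backup-1')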