helpers.py

from __future__ import with_statement
import argparse
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
from operator import attrgetter
import grp
import os
import pwd
import re
import stat
import struct
import sys
import time
import urllib


class Purger(object):
    """Purging helper"""

    def __init__(self):
        self.items = {}

    def insert(self, key, value):
        self.items.setdefault(key, [])
        self.items[key].append(value)

    def purge(self, n, reverse=False):
        keep = []
        delete = []
        for key, values in sorted(self.items.items(), reverse=reverse):
            if n:
                values.sort(key=attrgetter('ts'), reverse=reverse)
                keep.append(values[0])
                delete += values[1:]
                n -= 1
            else:
                delete += values
        return keep, delete
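
# Illustrative sketch (not part of the original module): Purger groups values
# by key; purge(n) keeps one value for each of the first n keys (keys sorted,
# and within a key the values sorted by their `ts` attribute) and marks
# everything else for deletion. `arch1`/`arch2` below are hypothetical objects
# carrying a `ts` attribute, e.g. archive metadata.
# >>> p = Purger()
# >>> p.insert('2011-01', arch1)
# >>> p.insert('2011-01', arch2)
# >>> keep, delete = p.purge(1, reverse=True)   # keep the newest value of the newest key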


class Statistics(object):

    def __init__(self):
        self.osize = self.csize = self.usize = self.nfiles = 0

    def update(self, size, csize, unique):
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    def print_(self):
        print 'Number of files: %d' % self.nfiles
        print 'Original size: %d (%s)' % (self.osize, format_file_size(self.osize))
        print 'Compressed size: %d (%s)' % (self.csize, format_file_size(self.csize))
        print 'Unique data: %d (%s)' % (self.usize, format_file_size(self.usize))


# OS X filenames are UTF-8 only, so any non-UTF-8 filename is URL encoded
if sys.platform == 'darwin':
    def encode_filename(name):
        try:
            name.decode('utf-8')
            return name
        except UnicodeDecodeError:
            return urllib.quote(name)
else:
    encode_filename = str


class Counter(object):

    __slots__ = ('v',)

    def __init__(self, value=0):
        self.v = value

    def inc(self, amount=1):
        self.v += amount

    def dec(self, amount=1):
        self.v -= amount

    def __cmp__(self, x):
        return cmp(self.v, x)

    def __repr__(self):
        return '<Counter(%r)>' % self.v


def get_keys_dir():
    """Determine where to store keys"""
    return os.environ.get('DARC_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'keys'))


def get_cache_dir():
    """Determine where to store cache data"""
    return os.environ.get('DARC_CACHE_DIR',
                          os.path.join(os.path.expanduser('~'), '.darc', 'cache'))


def deferrable(f):
    def wrapper(*args, **kw):
        callback = kw.pop('callback', None)
        if callback:
            data = kw.pop('callback_data', None)
            try:
                res = f(*args, **kw)
            except Exception, e:
                callback(None, e, data)
            else:
                callback(res, None, data)
        else:
            return f(*args, **kw)
    return wrapper


def error_callback(res, error, data):
    # Callbacks are invoked as callback(result, error, data) by deferrable
    # above; re-raise the error when the deferred call failed.
    if error:
        raise error
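
# Illustrative sketch (not part of the original module): a function wrapped
# with @deferrable runs synchronously by default; passing callback=... (and
# optionally callback_data=...) routes the result or the raised exception to
# the callback instead of returning/raising it. `fetch` is a hypothetical example.
# >>> @deferrable
# ... def fetch(x):
# ...     return x * 2
# >>> fetch(21)                                   # plain synchronous call
# 42
# >>> fetch(21, callback=error_callback)          # deferred form; only errors propagate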


def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return ts - timedelta(seconds=time.altzone)


def read_set(path):
    """Read set from disk (as int32s)
    """
    with open(path, 'rb') as fd:
        data = fd.read()
        return set(struct.unpack('<%di' % (len(data) / 4), data))


def write_set(s, path):
    """Write set to disk (as int32s)
    """
    with open(path, 'wb') as fd:
        fd.write(struct.pack('<%di' % len(s), *s))
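
# Illustrative round trip (not part of the original module); the path below is
# a hypothetical example and each element must fit in a signed 32-bit integer.
# >>> write_set(set([1, 2, 3]), '/tmp/example.set')
# >>> read_set('/tmp/example.set') == set([1, 2, 3])
# True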


def encode_long(v):
    bytes = []
    while True:
        if v > 0x7f:
            bytes.append(0x80 | (v % 0x80))
            v >>= 7
        else:
            bytes.append(v)
            return ''.join(chr(x) for x in bytes)


def decode_long(bytes):
    v = 0
    base = 0
    for x in bytes:
        b = ord(x)
        if b & 0x80:
            v += (b & 0x7f) << base
            base += 7
        else:
            return v + (b << base)
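
# Illustrative example (not part of the original module): encode_long and
# decode_long implement a little-endian base-128 varint, 7 data bits per byte
# with the high bit marking continuation.
# >>> encode_long(300)
# '\xac\x02'
# >>> decode_long(encode_long(300))
# 300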


def exclude_path(path, patterns):
    """Used by create and extract sub-commands to determine
    if an item should be processed or not
    """
    for pattern in (patterns or []):
        if pattern.match(path):
            return isinstance(pattern, ExcludePattern)
    return False


class IncludePattern(object):
    """--include PATTERN

    >>> py = IncludePattern('*.py')
    >>> foo = IncludePattern('/foo')
    >>> py.match('/foo/foo.py')
    True
    >>> py.match('/bar/foo.java')
    False
    >>> foo.match('/foo/foo.py')
    True
    >>> foo.match('/bar/foo.java')
    False
    >>> foo.match('/foobar/foo.py')
    False
    >>> foo.match('/foo')
    True
    """
    def __init__(self, pattern):
        self.pattern = self.dirpattern = pattern
        if not pattern.endswith(os.path.sep):
            self.dirpattern += os.path.sep

    def match(self, path):
        dir, name = os.path.split(path)
        return (path == self.pattern
                or (dir + os.path.sep).startswith(self.dirpattern)
                or fnmatchcase(name, self.pattern))

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)


class ExcludePattern(IncludePattern):
    """Same matching rules as IncludePattern; a match excludes the item
    (see exclude_path above).
    """


def walk_path(path, skip_inodes=None):
    st = os.lstat(path)
    if skip_inodes and (st.st_ino, st.st_dev) in skip_inodes:
        return
    yield path, st
    if stat.S_ISDIR(st.st_mode):
        for f in os.listdir(path):
            for x in walk_path(os.path.join(path, f), skip_inodes):
                yield x


def format_time(t):
    """Format datetime suitable for fixed length list output
    """
    if (datetime.now() - t).days < 365:
        return t.strftime('%b %d %H:%M')
    else:
        return t.strftime('%b %d %Y')


def format_timedelta(td):
    """Format timedelta in a human friendly format

    >>> from datetime import datetime
    >>> t0 = datetime(2001, 1, 1, 10, 20, 3, 0)
    >>> t1 = datetime(2001, 1, 1, 12, 20, 4, 100000)
    >>> format_timedelta(t1 - t0)
    '2 hours 1.10 seconds'
    """
    # Since td.total_seconds() requires python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / float(10**6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt


def format_file_mode(mod):
    """Format file mode bits for list output
    """
    def x(v):
        return ''.join(v & m and s or '-'
                       for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
    return '%s%s%s' % (x(mod / 64), x(mod / 8), x(mod))
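
# Illustrative example (not part of the original module): the permission bits
# are rendered as the usual owner/group/other rwx triplets.
# >>> format_file_mode(0755)
# 'rwxr-xr-x'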


def format_file_size(v):
    """Format file size into a human friendly format
    """
    if v > 1024 * 1024 * 1024:
        return '%.2f GB' % (v / 1024. / 1024. / 1024.)
    elif v > 1024 * 1024:
        return '%.2f MB' % (v / 1024. / 1024.)
    elif v > 1024:
        return '%.2f kB' % (v / 1024.)
    else:
        return str(v)
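
# Illustrative example (not part of the original module): sizes up to 1024
# bytes are printed verbatim, larger values as kB/MB/GB with two decimals.
# >>> format_file_size(1024)
# '1024'
# >>> format_file_size(5 * 1024 * 1024)
# '5.00 MB'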


class IntegrityError(Exception):
    """Exception raised when a data integrity check fails
    """


def memoize(function):
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function


@memoize
def uid2user(uid):
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return None


@memoize
def user2uid(user):
    try:
        return pwd.getpwnam(user).pw_uid
    except KeyError:
        return None


@memoize
def gid2group(gid):
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return None


@memoize
def group2gid(group):
    try:
        return grp.getgrnam(group).gr_gid
    except KeyError:
        return None


class Location(object):
    """Object representing a store / archive location

    >>> Location('ssh://user@host:1234/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive='archive')
    >>> Location('file:///some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    >>> Location('user@host:/some/path::archive')
    Location(proto='ssh', user='user', host='host', port=22, path='/some/path', archive='archive')
    >>> Location('/some/path::archive')
    Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')
    """
    proto = user = host = port = path = archive = None
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]*)(?:::(?P<archive>.+))?')

    def __init__(self, text):
        if not self.parse(text):
            raise ValueError

    def parse(self, text):
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or 22
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = m.group('path')
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            if self.proto == 'ssh':
                self.port = 22
            return True
        return False

    def __str__(self):
        items = []
        items.append('proto=%r' % self.proto)
        items.append('user=%r' % self.user)
        items.append('host=%r' % self.host)
        items.append('port=%r' % self.port)
        items.append('path=%r' % self.path)
        items.append('archive=%r' % self.archive)
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub(r'[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self


def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive can be specified' % text)
        return loc
    return validator
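
# Illustrative sketch (not part of the original module): location_validator is
# intended as an argparse `type`; the argument name and example location below
# are hypothetical.
# >>> parser = argparse.ArgumentParser()
# >>> _ = parser.add_argument('archive', type=location_validator(archive=True))
# >>> args = parser.parse_args(['user@host:/backups::monday'])
# >>> (args.archive.host, args.archive.archive)
# ('host', 'monday')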