helpers.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486
import argparse
import binascii
import fcntl
import functools
import grp
import os
import pwd
import re
import stat
import sys
import time
from datetime import datetime, timezone, timedelta
from fnmatch import translate
from operator import attrgetter

import msgpack
  15. class Error(Exception):
  16. """Error base class"""
  17. exit_code = 1
  18. def get_message(self):
  19. return 'Error: ' + type(self).__doc__.format(*self.args)
  20. class UpgradableLock:
  21. class LockUpgradeFailed(Error):
  22. """Failed to acquire write lock on {}"""
  23. def __init__(self, path, exclusive=False):
  24. self.path = path
  25. try:
  26. self.fd = open(path, 'r+')
  27. except IOError:
  28. self.fd = open(path, 'r')
  29. if exclusive:
  30. fcntl.lockf(self.fd, fcntl.LOCK_EX)
  31. else:
  32. fcntl.lockf(self.fd, fcntl.LOCK_SH)
  33. self.is_exclusive = exclusive
  34. def upgrade(self):
  35. try:
  36. fcntl.lockf(self.fd, fcntl.LOCK_EX)
  37. except OSError as e:
  38. raise self.LockUpgradeFailed(self.path)
  39. self.is_exclusive = True
  40. def release(self):
  41. fcntl.lockf(self.fd, fcntl.LOCK_UN)
  42. self.fd.close()
  43. class Manifest:
  44. MANIFEST_ID = b'\0' * 32
  45. def __init__(self, key, repository):
  46. self.archives = {}
  47. self.config = {}
  48. self.key = key
  49. self.repository = repository
  50. @classmethod
  51. def load(cls, repository):
  52. from .key import key_factory
  53. cdata = repository.get(cls.MANIFEST_ID)
  54. key = key_factory(repository, cdata)
  55. manifest = cls(key, repository)
  56. data = key.decrypt(None, cdata)
  57. manifest.id = key.id_hash(data)
  58. m = msgpack.unpackb(data)
  59. if not m.get(b'version') == 1:
  60. raise ValueError('Invalid manifest version')
  61. manifest.archives = dict((k.decode('utf-8'), v) for k,v in m[b'archives'].items())
  62. manifest.timestamp = m.get(b'timestamp')
  63. if manifest.timestamp:
  64. manifest.timestamp = manifest.timestamp.decode('ascii')
  65. manifest.config = m[b'config']
  66. return manifest, key
  67. def write(self):
  68. self.timestamp = datetime.utcnow().isoformat()
  69. data = msgpack.packb({
  70. 'version': 1,
  71. 'archives': self.archives,
  72. 'timestamp': self.timestamp,
  73. 'config': self.config,
  74. })
  75. self.id = self.key.id_hash(data)
  76. self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))
  77. def prune_within(archives, within):
  78. multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
  79. try:
  80. hours = int(within[:-1]) * multiplier[within[-1]]
  81. except (KeyError, ValueError):
  82. # I don't like how this displays the original exception too:
  83. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  84. if hours <= 0:
  85. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  86. target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
  87. return [a for a in archives if a.ts > target]
  88. def prune_split(archives, pattern, n, skip=[]):
  89. last = None
  90. keep = []
  91. if n == 0:
  92. return keep
  93. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  94. period = a.ts.strftime(pattern)
  95. if period != last:
  96. last = period
  97. if a not in skip:
  98. keep.append(a)
  99. if len(keep) == n: break
  100. return keep
  101. class Statistics:
  102. def __init__(self):
  103. self.osize = self.csize = self.usize = self.nfiles = 0
  104. def update(self, size, csize, unique):
  105. self.osize += size
  106. self.csize += csize
  107. if unique:
  108. self.usize += csize
  109. def print_(self):
  110. print('Number of files: %d' % self.nfiles)
  111. print('Original size: %d (%s)' % (self.osize, format_file_size(self.osize)))
  112. print('Compressed size: %s (%s)' % (self.csize, format_file_size(self.csize)))
  113. print('Unique data: %d (%s)' % (self.usize, format_file_size(self.usize)))
  114. def get_keys_dir():
  115. """Determine where to repository keys and cache"""
  116. return os.environ.get('ATTIC_KEYS_DIR',
  117. os.path.join(os.path.expanduser('~'), '.attic', 'keys'))
  118. def get_cache_dir():
  119. """Determine where to repository keys and cache"""
  120. return os.environ.get('ATTIC_CACHE_DIR',
  121. os.path.join(os.path.expanduser('~'), '.cache', 'attic'))
  122. def to_localtime(ts):
  123. """Convert datetime object from UTC to local time zone"""
  124. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  125. def update_excludes(args):
  126. """Merge exclude patterns from files with those on command line.
  127. Empty lines and lines starting with '#' are ignored, but whitespace
  128. is not stripped."""
  129. if hasattr(args, 'exclude_files') and args.exclude_files:
  130. if not hasattr(args, 'excludes') or args.excludes is None:
  131. args.excludes = []
  132. for file in args.exclude_files:
  133. patterns = [line.rstrip('\r\n') for line in file if not line.startswith('#')]
  134. args.excludes += [ExcludePattern(pattern) for pattern in patterns if pattern]
  135. file.close()
  136. def adjust_patterns(paths, excludes):
  137. if paths:
  138. return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
  139. else:
  140. return excludes
  141. def exclude_path(path, patterns):
  142. """Used by create and extract sub-commands to determine
  143. whether or not an item should be processed.
  144. """
  145. for pattern in (patterns or []):
  146. if pattern.match(path):
  147. return isinstance(pattern, ExcludePattern)
  148. return False
  149. # For both IncludePattern and ExcludePattern, we require that
  150. # the pattern either match the whole path or an initial segment
  151. # of the path up to but not including a path separator. To
  152. # unify the two cases, we add a path separator to the end of
  153. # the path before matching.
  154. class IncludePattern:
  155. """Literal files or directories listed on the command line
  156. for some operations (e.g. extract, but not create).
  157. If a directory is specified, all paths that start with that
  158. path match as well. A trailing slash makes no difference.
  159. """
  160. def __init__(self, pattern):
  161. self.pattern = pattern.rstrip(os.path.sep)+os.path.sep
  162. def match(self, path):
  163. return (path+os.path.sep).startswith(self.pattern)
  164. def __repr__(self):
  165. return '%s(%s)' % (type(self), self.pattern)
  166. class ExcludePattern(IncludePattern):
  167. """Shell glob patterns to exclude. A trailing slash means to
  168. exclude the contents of a directory, but not the directory itself.
  169. """
  170. def __init__(self, pattern):
  171. if pattern.endswith(os.path.sep):
  172. self.pattern = pattern+'*'+os.path.sep
  173. else:
  174. self.pattern = pattern+os.path.sep+'*'
  175. # fnmatch and re.match both cache compiled regular expressions.
  176. # Nevertheless, this is about 10 times faster.
  177. self.regex = re.compile(translate(self.pattern))
  178. def match(self, path):
  179. return self.regex.match(path+os.path.sep) is not None
  180. def __repr__(self):
  181. return '%s(%s)' % (type(self), self.pattern)
  182. def walk_path(path, skip_inodes=None):
  183. st = os.lstat(path)
  184. if skip_inodes and (st.st_ino, st.st_dev) in skip_inodes:
  185. return
  186. yield path, st
  187. if stat.S_ISDIR(st.st_mode):
  188. for f in os.listdir(path):
  189. for x in walk_path(os.path.join(path, f), skip_inodes):
  190. yield x
  191. def format_time(t):
  192. """Format datetime suitable for fixed length list output
  193. """
  194. if (datetime.now() - t).days < 365:
  195. return t.strftime('%b %d %H:%M')
  196. else:
  197. return t.strftime('%b %d %Y')
  198. def format_timedelta(td):
  199. """Format timedelta in a human friendly format
  200. """
  201. # Since td.total_seconds() requires python 2.7
  202. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  203. s = ts % 60
  204. m = int(ts / 60) % 60
  205. h = int(ts / 3600) % 24
  206. txt = '%.2f seconds' % s
  207. if m:
  208. txt = '%d minutes %s' % (m, txt)
  209. if h:
  210. txt = '%d hours %s' % (h, txt)
  211. if td.days:
  212. txt = '%d days %s' % (td.days, txt)
  213. return txt
  214. def format_file_mode(mod):
  215. """Format file mode bits for list output
  216. """
  217. def x(v):
  218. return ''.join(v & m and s or '-'
  219. for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
  220. return '%s%s%s' % (x(mod // 64), x(mod // 8), x(mod))
  221. def format_file_size(v):
  222. """Format file size into a human friendly format
  223. """
  224. if v > 1024 * 1024 * 1024:
  225. return '%.2f GB' % (v / 1024. / 1024. / 1024.)
  226. elif v > 1024 * 1024:
  227. return '%.2f MB' % (v / 1024. / 1024.)
  228. elif v > 1024:
  229. return '%.2f kB' % (v / 1024.)
  230. else:
  231. return '%d B' % v
class IntegrityError(Exception):
    """Data integrity error.

    NOTE(review): no raisers are visible in this file; presumably raised
    elsewhere when decryption or checksum validation fails -- confirm
    against the modules that import this helper.
    """
  235. def memoize(function):
  236. cache = {}
  237. def decorated_function(*args):
  238. try:
  239. return cache[args]
  240. except KeyError:
  241. val = function(*args)
  242. cache[args] = val
  243. return val
  244. return decorated_function
  245. @memoize
  246. def uid2user(uid):
  247. try:
  248. return pwd.getpwuid(uid).pw_name
  249. except KeyError:
  250. return None
  251. @memoize
  252. def user2uid(user):
  253. try:
  254. return user and pwd.getpwnam(user).pw_uid
  255. except KeyError:
  256. return None
  257. @memoize
  258. def gid2group(gid):
  259. try:
  260. return grp.getgrgid(gid).gr_name
  261. except KeyError:
  262. return None
  263. @memoize
  264. def group2gid(group):
  265. try:
  266. return group and grp.getgrnam(group).gr_gid
  267. except KeyError:
  268. return None
  269. class Location:
  270. """Object representing a repository / archive location
  271. """
  272. proto = user = host = port = path = archive = None
  273. ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
  274. r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
  275. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?')
  276. file_re = re.compile(r'(?P<proto>file)://'
  277. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?')
  278. scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
  279. r'(?P<path>[^:]+)(?:::(?P<archive>.+))?')
  280. def __init__(self, text):
  281. self.orig = text
  282. if not self.parse(text):
  283. raise ValueError
  284. def parse(self, text):
  285. m = self.ssh_re.match(text)
  286. if m:
  287. self.proto = m.group('proto')
  288. self.user = m.group('user')
  289. self.host = m.group('host')
  290. self.port = m.group('port') and int(m.group('port')) or None
  291. self.path = m.group('path')
  292. self.archive = m.group('archive')
  293. return True
  294. m = self.file_re.match(text)
  295. if m:
  296. self.proto = m.group('proto')
  297. self.path = m.group('path')
  298. self.archive = m.group('archive')
  299. return True
  300. m = self.scp_re.match(text)
  301. if m:
  302. self.user = m.group('user')
  303. self.host = m.group('host')
  304. self.path = m.group('path')
  305. self.archive = m.group('archive')
  306. self.proto = self.host and 'ssh' or 'file'
  307. return True
  308. return False
  309. def __str__(self):
  310. items = []
  311. items.append('proto=%r' % self.proto)
  312. items.append('user=%r' % self.user)
  313. items.append('host=%r' % self.host)
  314. items.append('port=%r' % self.port)
  315. items.append('path=%r' % self.path)
  316. items.append('archive=%r' % self.archive)
  317. return ', '.join(items)
  318. def to_key_filename(self):
  319. name = re.sub('[^\w]', '_', self.path).strip('_')
  320. if self.proto != 'file':
  321. name = self.host + '__' + name
  322. return os.path.join(get_keys_dir(), name)
  323. def __repr__(self):
  324. return "Location(%s)" % self
  325. def location_validator(archive=None):
  326. def validator(text):
  327. try:
  328. loc = Location(text)
  329. except ValueError:
  330. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
  331. if archive is True and not loc.archive:
  332. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  333. elif archive is False and loc.archive:
  334. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  335. return loc
  336. return validator
  337. def read_msgpack(filename):
  338. with open(filename, 'rb') as fd:
  339. return msgpack.unpack(fd)
  340. def write_msgpack(filename, d):
  341. with open(filename + '.tmp', 'wb') as fd:
  342. msgpack.pack(d, fd)
  343. fd.flush()
  344. os.fsync(fd)
  345. os.rename(filename + '.tmp', filename)
  346. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  347. for key in keys:
  348. if isinstance(d.get(key), bytes):
  349. d[key] = d[key].decode(encoding, errors)
  350. return d
  351. def remove_surrogates(s, errors='replace'):
  352. """Replace surrogates generated by fsdecode with '?'
  353. """
  354. return s.encode('utf-8', errors).decode('utf-8')
  355. _safe_re = re.compile('^((..)?/+)+')
  356. def make_path_safe(path):
  357. """Make path safe by making it relative and local
  358. """
  359. return _safe_re.sub('', path) or '.'
  360. def daemonize():
  361. """Detach process from controlling terminal and run in background
  362. """
  363. pid = os.fork()
  364. if pid:
  365. os._exit(0)
  366. os.setsid()
  367. pid = os.fork()
  368. if pid:
  369. os._exit(0)
  370. os.chdir('/')
  371. os.close(0)
  372. os.close(1)
  373. os.close(2)
  374. fd = os.open('/dev/null', os.O_RDWR)
  375. os.dup2(fd, 0)
  376. os.dup2(fd, 1)
  377. os.dup2(fd, 2)
  378. if sys.version < '3.3':
  379. # st_mtime_ns attribute only available in 3.3+
  380. def st_mtime_ns(st):
  381. return int(st.st_mtime * 1e9)
  382. # unhexlify in < 3.3 incorrectly only accepts bytes input
  383. def unhexlify(data):
  384. if isinstance(data, str):
  385. data = data.encode('ascii')
  386. return binascii.unhexlify(data)
  387. else:
  388. def st_mtime_ns(st):
  389. return st.st_mtime_ns
  390. unhexlify = binascii.unhexlify