helpers.py 19 KB

import argparse
import binascii
import grp
import msgpack
import os
import pwd
import re
import sys
import time
from datetime import datetime, timezone, timedelta
from fnmatch import translate
from operator import attrgetter
import fcntl

import attic.hashindex
import attic.chunker
import attic.crypto

class Error(Exception):
    """Error base class"""
    exit_code = 1

    def get_message(self):
        return 'Error: ' + type(self).__doc__.format(*self.args)


class ExtensionModuleError(Error):
    """The Attic binary extension modules do not seem to be properly installed"""

class UpgradableLock:

    class ReadLockFailed(Error):
        """Failed to acquire read lock on {}"""

    class WriteLockFailed(Error):
        """Failed to acquire write lock on {}"""

    def __init__(self, path, exclusive=False):
        self.path = path
        try:
            self.fd = open(path, 'r+')
        except IOError:
            self.fd = open(path, 'r')
        try:
            if exclusive:
                fcntl.lockf(self.fd, fcntl.LOCK_EX)
            else:
                fcntl.lockf(self.fd, fcntl.LOCK_SH)
        # Python 3.2 raises IOError, Python 3.3+ raises OSError
        except (IOError, OSError):
            if exclusive:
                raise self.WriteLockFailed(self.path)
            else:
                raise self.ReadLockFailed(self.path)
        self.is_exclusive = exclusive

    def upgrade(self):
        try:
            fcntl.lockf(self.fd, fcntl.LOCK_EX)
        # Python 3.2 raises IOError, Python 3.3+ raises OSError
        except (IOError, OSError):
            raise self.WriteLockFailed(self.path)
        self.is_exclusive = True

    def release(self):
        fcntl.lockf(self.fd, fcntl.LOCK_UN)
        self.fd.close()

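# Illustrative usage sketch (not part of the original module): how the
# shared/exclusive lock protocol above is typically driven. The lock file
# path is hypothetical; note that __init__ only opens the file, it never
# creates it.
def _example_upgradable_lock(lock_path='/tmp/attic-example.lock'):
    open(lock_path, 'a').close()        # make sure the lock file exists
    lock = UpgradableLock(lock_path)    # acquire a shared (read) lock
    try:
        lock.upgrade()                  # escalate to an exclusive (write) lock
    finally:
        lock.release()                  # always unlock and close the fd
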
def check_extension_modules():
    import attic.platform
    if (attic.hashindex.API_VERSION != 2 or
        attic.chunker.API_VERSION != 2 or
        attic.crypto.API_VERSION != 2 or
        attic.platform.API_VERSION != 2):
        raise ExtensionModuleError

class Manifest:

    def __init__(self, key, repository):
        self.archives = {}
        self.config = {}
        self.key = key
        self.repository = repository

    @classmethod
    def manifest_id(cls, repository):
        return b'\0' * repository.key_size

    @classmethod
    def load(cls, repository, key=None):
        from .key import key_factory
        cdata = repository.get(cls.manifest_id(repository))
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        return manifest, key

    def write(self):
        self.timestamp = datetime.utcnow().isoformat()
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.manifest_id(self.repository), self.key.encrypt(data))

def prune_within(archives, within):
    multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
    try:
        hours = int(within[:-1]) * multiplier[within[-1]]
    except (KeyError, ValueError):
        # I don't like how this displays the original exception too:
        raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
    if hours <= 0:
        raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
    target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
    return [a for a in archives if a.ts > target]

def prune_split(archives, pattern, n, skip=[]):
    last = None
    keep = []
    if n == 0:
        return keep
    for a in sorted(archives, key=attrgetter('ts'), reverse=True):
        period = to_localtime(a.ts).strftime(pattern)
        if period != last:
            last = period
            if a not in skip:
                keep.append(a)
                if len(keep) == n:
                    break
    return keep

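# Illustrative sketch (not part of the original module): prune_split keeps
# the newest archive of each calendar period, where the period is defined by
# the strftime pattern. The Archive namedtuple is a stand-in for the real
# archive objects, which only need a timezone-aware ``ts`` attribute here.
def _example_prune_daily():
    from collections import namedtuple
    Archive = namedtuple('Archive', 'name ts')
    archives = [
        Archive('a1', datetime(2015, 1, 1, 10, 0, tzinfo=timezone.utc)),
        Archive('a2', datetime(2015, 1, 1, 22, 0, tzinfo=timezone.utc)),
        Archive('a3', datetime(2015, 1, 2, 9, 0, tzinfo=timezone.utc)),
    ]
    # '%Y-%m-%d' groups archives by day; n=7 keeps at most the newest archive
    # from each of the last seven distinct days (expected here: a3 and a2).
    return prune_split(archives, '%Y-%m-%d', 7)
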
class Statistics:

    def __init__(self):
        self.osize = self.csize = self.usize = self.nfiles = 0

    def update(self, size, csize, unique):
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    def print_(self, label, cache):
        total_size, total_csize, unique_size, unique_csize = cache.chunks.summarize()
        print()
        print('                       Original size      Compressed size    Deduplicated size')
        print('%-15s %20s %20s %20s' % (label, format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize)))
        print('All archives:   %20s %20s %20s' % (format_file_size(total_size), format_file_size(total_csize), format_file_size(unique_csize)))

    def show_progress(self, item=None, final=False):
        if not final:
            path = remove_surrogates(item[b'path']) if item else ''
            if len(path) > 43:
                path = '%s...%s' % (path[:20], path[-20:])
            msg = '%9s O %9s C %9s D %-43s' % (
                format_file_size(self.osize), format_file_size(self.csize), format_file_size(self.usize), path)
        else:
            msg = ' ' * 79
        print(msg, end='\r')
        sys.stdout.flush()

def get_keys_dir():
    """Determine where to store repository keys"""
    return os.environ.get('ATTIC_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.attic', 'keys'))


def get_cache_dir():
    """Determine where to store the repository cache"""
    return os.environ.get('ATTIC_CACHE_DIR',
                          os.path.join(os.path.expanduser('~'), '.cache', 'attic'))

def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])


def parse_timestamp(timestamp):
    """Parse an ISO 8601 timestamp string"""
    if '.' in timestamp:  # microseconds might not be present
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
    else:
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)

def update_excludes(args):
    """Merge exclude patterns from files with those on command line.
    Empty lines and lines starting with '#' are ignored, but whitespace
    is not stripped."""
    if hasattr(args, 'exclude_files') and args.exclude_files:
        if not hasattr(args, 'excludes') or args.excludes is None:
            args.excludes = []
        for file in args.exclude_files:
            patterns = [line.rstrip('\r\n') for line in file if not line.startswith('#')]
            args.excludes += [ExcludePattern(pattern) for pattern in patterns if pattern]
            file.close()


def adjust_patterns(paths, excludes):
    if paths:
        return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
    else:
        return excludes

def exclude_path(path, patterns):
    """Used by create and extract sub-commands to determine
    whether or not an item should be processed.
    """
    for pattern in (patterns or []):
        if pattern.match(path):
            return isinstance(pattern, ExcludePattern)
    return False


# For both IncludePattern and ExcludePattern, we require that
# the pattern either match the whole path or an initial segment
# of the path up to but not including a path separator. To
# unify the two cases, we add a path separator to the end of
# the path before matching.

class IncludePattern:
    """Literal files or directories listed on the command line
    for some operations (e.g. extract, but not create).
    If a directory is specified, all paths that start with that
    path match as well. A trailing slash makes no difference.
    """
    def __init__(self, pattern):
        self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep

    def match(self, path):
        return (path+os.path.sep).startswith(self.pattern)

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)


class ExcludePattern(IncludePattern):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    def __init__(self, pattern):
        if pattern.endswith(os.path.sep):
            self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep+'*'+os.path.sep
        else:
            self.pattern = os.path.normpath(pattern)+os.path.sep+'*'
        # fnmatch and re.match both cache compiled regular expressions.
        # Nevertheless, this is about 10 times faster.
        self.regex = re.compile(translate(self.pattern))

    def match(self, path):
        return self.regex.match(path+os.path.sep) is not None

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)

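# Illustrative sketch (not part of the original module) of the matching
# semantics described in the comment above: a pattern matches the whole path
# or an initial segment ending at a path separator, and the first matching
# pattern decides. The paths used here are hypothetical.
def _example_pattern_matching():
    patterns = [ExcludePattern('/home/*/.cache'), IncludePattern('/home')]
    # Paths under /home are included unless they fall below a .cache directory.
    return [exclude_path(p, patterns)
            for p in ('/home/user/.cache/foo', '/home/user/data')]
    # expected: [True, False] (the first path is excluded, the second is not)
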
def timestamp(s):
    """Convert a --timestamp=s argument to a datetime object"""
    try:
        # is it pointing to a file / directory?
        ts = os.stat(s).st_mtime
        return datetime.utcfromtimestamp(ts)
    except OSError:
        # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
        for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
                       '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
                       '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
                       '%Y-%m-%d', '%Y-%j',
                       ):
            try:
                return datetime.strptime(s, format)
            except ValueError:
                continue
        raise ValueError

def is_cachedir(path):
    """Determines whether the specified path is a cache directory (and
    therefore should potentially be excluded from the backup) according to
    the CACHEDIR.TAG protocol
    (http://www.brynosaurus.com/cachedir/spec.html).
    """
    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
    tag_path = os.path.join(path, 'CACHEDIR.TAG')
    try:
        if os.path.exists(tag_path):
            with open(tag_path, 'rb') as tag_file:
                tag_data = tag_file.read(len(tag_contents))
                if tag_data == tag_contents:
                    return True
    except OSError:
        pass
    return False

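# Illustrative sketch (not part of the original module): marking a directory
# as a cache per the CACHEDIR.TAG protocol so that is_cachedir() detects it.
# The directory path is hypothetical.
def _example_mark_cachedir(path='/tmp/example-cache'):
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, 'CACHEDIR.TAG'), 'wb') as fd:
        # The signature line is what is_cachedir() checks for.
        fd.write(b'Signature: 8a477f597d28d172789f06886806bc55\n')
    return is_cachedir(path)  # expected: True
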
def format_time(t):
    """Format datetime suitable for fixed length list output
    """
    if abs((datetime.now() - t).days) < 365:
        return t.strftime('%b %d %H:%M')
    else:
        return t.strftime('%b %d %Y')


def format_timedelta(td):
    """Format timedelta in a human friendly format
    """
    # Since td.total_seconds() requires python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt

def format_file_mode(mod):
    """Format file mode bits for list output
    """
    def x(v):
        return ''.join(v & m and s or '-'
                       for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
    return '%s%s%s' % (x(mod // 64), x(mod // 8), x(mod))


def format_file_size(v):
    """Format file size into a human friendly format
    """
    if abs(v) > 10 ** 12:
        return '%.2f TB' % (v / 10 ** 12)
    elif abs(v) > 10 ** 9:
        return '%.2f GB' % (v / 10 ** 9)
    elif abs(v) > 10 ** 6:
        return '%.2f MB' % (v / 10 ** 6)
    elif abs(v) > 10 ** 3:
        return '%.2f kB' % (v / 10 ** 3)
    else:
        return '%d B' % v

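# Illustrative sketch (not part of the original module): the three integer
# divisions in format_file_mode() peel off the owner/group/other permission
# digits of an octal mode, and format_file_size() uses decimal (SI) multiples.
def _example_formatting():
    mode = format_file_mode(0o644)    # expected: 'rw-r--r--'
    size = format_file_size(1500000)  # expected: '1.50 MB'
    return mode, size
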
def format_archive(archive):
    return '%-36s %s' % (archive.name, to_localtime(archive.ts).strftime('%c'))


class IntegrityError(Error):
    """Data integrity error"""

def memoize(function):
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function


@memoize
def uid2user(uid, default=None):
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return default


@memoize
def user2uid(user, default=None):
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return default


@memoize
def gid2group(gid, default=None):
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return default


@memoize
def group2gid(group, default=None):
    try:
        return group and grp.getgrnam(group).gr_gid
    except KeyError:
        return default

def posix_acl_use_stored_uid_gid(acl):
    """Replace the user/group field with the stored uid/gid
    """
    entries = []
    for entry in acl.decode('ascii').split('\n'):
        if entry:
            fields = entry.split(':')
            if len(fields) == 4:
                entries.append(':'.join([fields[0], fields[3], fields[2]]))
            else:
                entries.append(entry)
    return ('\n'.join(entries)).encode('ascii')

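# Illustrative sketch (not part of the original module): ACL entries of the
# extended form "tag:name:perms:id" are rewritten so the numeric id stored at
# backup time is used instead of the (possibly different) local name.
def _example_acl_rewrite():
    acl = b'user:alice:rw-:1000\ngroup::r--\n'
    return posix_acl_use_stored_uid_gid(acl)
    # expected: b'user:1000:rw-\ngroup::r--'
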
class Location:
    """Object representing a repository / archive location
    """
    proto = user = host = port = path = archive = None
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>.+))?$')

    def __init__(self, text):
        self.orig = text
        if not self.parse(text):
            raise ValueError

    def parse(self, text):
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = m.group('path')
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            return True
        return False

    def __str__(self):
        items = []
        items.append('proto=%r' % self.proto)
        items.append('user=%r' % self.user)
        items.append('host=%r' % self.host)
        items.append('port=%r' % self.port)
        items.append('path=%r' % self.path)
        items.append('archive=%r' % self.archive)
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub(r'[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    def canonical_path(self):
        if self.proto == 'file':
            return self.path
        else:
            if self.path and self.path.startswith('~'):
                path = '/' + self.path
            elif self.path and not self.path.startswith('/'):
                path = '/~/' + self.path
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self.host,
                                           ':{}'.format(self.port) if self.port else '',
                                           path)

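# Illustrative sketch (not part of the original module): the three regexes
# above are tried in order, so ssh:// URLs, file:// URLs and scp-style
# strings all parse into the same attributes. The host name and path here
# are hypothetical.
def _example_location_parsing():
    loc = Location('ssh://backup@example.com:2222/srv/repo::monday')
    # loc.proto == 'ssh', loc.user == 'backup', loc.host == 'example.com',
    # loc.port == 2222, loc.path == '/srv/repo', loc.archive == 'monday'
    return loc.canonical_path()
    # expected: 'ssh://backup@example.com:2222/srv/repo'
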
def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive can be specified' % text)
        return loc
    return validator

def read_msgpack(filename):
    with open(filename, 'rb') as fd:
        return msgpack.unpack(fd)


def write_msgpack(filename, d):
    with open(filename + '.tmp', 'wb') as fd:
        msgpack.pack(d, fd)
        fd.flush()
        os.fsync(fd.fileno())
    os.rename(filename + '.tmp', filename)

def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
    for key in keys:
        if isinstance(d.get(key), bytes):
            d[key] = d[key].decode(encoding, errors)
    return d


def remove_surrogates(s, errors='replace'):
    """Replace surrogates generated by fsdecode with '?'
    """
    return s.encode('utf-8', errors).decode('utf-8')


_safe_re = re.compile(r'^((\.\.)?/+)+')


def make_path_safe(path):
    """Make path safe by making it relative and local
    """
    return _safe_re.sub('', path) or '.'

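# Illustrative sketch (not part of the original module): leading slashes and
# leading "../" segments are stripped so an archived path can never escape
# the extraction directory.
def _example_safe_paths():
    return [make_path_safe(p) for p in ('/etc/passwd', '../../etc/passwd', '/')]
    # expected: ['etc/passwd', 'etc/passwd', '.']
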
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    pid = os.fork()
    if pid:
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        os._exit(0)
    os.chdir('/')
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)

class StableDict(dict):
    """A dict subclass with stable items() ordering"""
    def items(self):
        return sorted(super(StableDict, self).items())

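# Illustrative sketch (not part of the original module): because items() is
# sorted, msgpack serialization of a StableDict does not depend on key
# insertion order, which is what keeps the manifest bytes (and thus the
# manifest id) reproducible in Manifest.write() above.
def _example_stable_dict():
    a = msgpack.packb(StableDict({'b': 2, 'a': 1}))
    b = msgpack.packb(StableDict({'a': 1, 'b': 2}))
    return a == b  # expected: True
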
if sys.version < '3.3':
    # st_mtime_ns attribute only available in 3.3+
    def st_mtime_ns(st):
        return int(st.st_mtime * 1e9)

    # unhexlify in < 3.3 incorrectly only accepts bytes input
    def unhexlify(data):
        if isinstance(data, str):
            data = data.encode('ascii')
        return binascii.unhexlify(data)
else:
    def st_mtime_ns(st):
        return st.st_mtime_ns

    unhexlify = binascii.unhexlify

def bigint_to_int(mtime):
    """Convert bytes to int
    """
    if isinstance(mtime, bytes):
        return int.from_bytes(mtime, 'little', signed=True)
    return mtime


def int_to_bigint(value):
    """Convert integers larger than 64 bits to bytes
    Smaller integers are left alone
    """
    if value.bit_length() > 63:
        return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
    return value
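

# Illustrative sketch (not part of the original module): values that fit in a
# signed 64-bit msgpack integer are stored as-is, anything larger (for
# example a nanosecond mtime far in the future) round-trips through a
# little-endian byte string.
def _example_bigint_roundtrip():
    small = 2 ** 62
    big = 2 ** 70
    assert int_to_bigint(small) == small              # left alone
    assert isinstance(int_to_bigint(big), bytes)      # encoded as bytes
    assert bigint_to_int(int_to_bigint(big)) == big   # lossless round trip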