helpers.py

from .support import argparse  # see support/__init__.py docstring, DEPRECATED - remove after requiring py 3.4

import binascii
from collections import namedtuple
from functools import wraps
import grp
import os
import pwd
import re
try:
    from shutil import get_terminal_size
except ImportError:
    def get_terminal_size(fallback=(80, 24)):
        TerminalSize = namedtuple('TerminalSize', ['columns', 'lines'])
        return TerminalSize(int(os.environ.get('COLUMNS', fallback[0])), int(os.environ.get('LINES', fallback[1])))
import sys
import platform
import time
import unicodedata

from datetime import datetime, timezone, timedelta
from fnmatch import translate
from operator import attrgetter

from . import hashindex
from . import chunker
from . import crypto
import msgpack
import msgpack.fallback

# return codes returned by borg command
# when borg is killed by signal N, rc = 128 + N
EXIT_SUCCESS = 0  # everything done, no problems
EXIT_WARNING = 1  # reached normal end of operation, but there were issues
EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation


class Error(Exception):
    """Error base class"""

    # if we raise such an Error and it is only caught by the uppermost
    # exception handler (that exits shortly after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        return type(self).__doc__.format(*self.args)


class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    traceback = True


class IntegrityError(ErrorWithTraceback):
    """Data integrity error"""


class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""


def check_extension_modules():
    from . import platform
    if hashindex.API_VERSION != 2:
        raise ExtensionModuleError
    if chunker.API_VERSION != 2:
        raise ExtensionModuleError
    if crypto.API_VERSION != 2:
        raise ExtensionModuleError
    if platform.API_VERSION != 2:
        raise ExtensionModuleError

class Manifest:

    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository):
        self.archives = {}
        self.config = {}
        self.key = key
        self.repository = repository

    @classmethod
    def load(cls, repository, key=None):
        from .key import key_factory
        cdata = repository.get(cls.MANIFEST_ID)
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        return manifest, key

    def write(self):
        self.timestamp = datetime.utcnow().isoformat()
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives

def prune_within(archives, within):
    multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
    try:
        hours = int(within[:-1]) * multiplier[within[-1]]
    except (KeyError, ValueError):
        # I don't like how this displays the original exception too:
        raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
    if hours <= 0:
        raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
    target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
    return [a for a in archives if a.ts > target]
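
# Illustrative example (comment only, not part of the original module): with
# within='2d', the multiplier table gives 2 * 24 = 48 hours, so only archives
# whose .ts lies within the last 48 hours are kept:
#
#     prune_within(archives, '2d')  # -> [a for a in archives if a.ts > now_utc - 48h]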

def prune_split(archives, pattern, n, skip=[]):
    last = None
    keep = []
    if n == 0:
        return keep
    for a in sorted(archives, key=attrgetter('ts'), reverse=True):
        period = to_localtime(a.ts).strftime(pattern)
        if period != last:
            last = period
            if a not in skip:
                keep.append(a)
                if len(keep) == n:
                    break
    return keep
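
# Illustrative example (comment only, not part of the original module): with a
# daily strftime pattern, prune_split keeps the newest archive of each of the
# last n distinct periods; archives already in `skip` are not added again:
#
#     keep = prune_split(archives, '%Y-%m-%d', 7)              # newest archive per day, 7 days
#     keep += prune_split(archives, '%Y-%m', 6, skip=keep)     # plus newest per month, 6 months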

class Statistics:

    def __init__(self):
        self.osize = self.csize = self.usize = self.nfiles = 0

    def update(self, size, csize, unique):
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    summary = """\
                       Original size      Compressed size    Deduplicated size
{label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""

    def __str__(self):
        return self.summary.format(stats=self, label='This archive:')

    def __repr__(self):
        return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)

    @property
    def osize_fmt(self):
        return format_file_size(self.osize)

    @property
    def usize_fmt(self):
        return format_file_size(self.usize)

    @property
    def csize_fmt(self):
        return format_file_size(self.csize)

    def show_progress(self, item=None, final=False, stream=None):
        columns, lines = get_terminal_size()
        if not final:
            msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
            path = remove_surrogates(item[b'path']) if item else ''
            space = columns - len(msg)
            if space < len('...') + len(path):
                path = '%s...%s' % (path[:(space//2)-len('...')], path[-space//2:])
            msg += "{0:<{space}}".format(path, space=space)
        else:
            msg = ' ' * columns
        print(msg, file=stream or sys.stderr, end="\r")
        (stream or sys.stderr).flush()
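
# Illustrative example (comment only, not part of the original module):
#
#     stats = Statistics()
#     stats.update(size=1000, csize=600, unique=True)  # osize=1000, csize=600, usize=600
#     stats.osize_fmt    # -> '1.00 kB' (decimal units, see format_file_size below)
#     print(stats)       # renders the three sizes using the `summary` template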

def get_keys_dir():
    """Determine where to store repository keys"""
    return os.environ.get('BORG_KEYS_DIR',
                          os.path.join(os.path.expanduser('~'), '.borg', 'keys'))


def get_cache_dir():
    """Determine where to store the repository cache"""
    xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
    return os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))


def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])


def parse_timestamp(timestamp):
    """Parse an ISO 8601 timestamp string"""
    if '.' in timestamp:  # microseconds might not be present
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
    else:
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
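
# Illustrative example (comment only, not part of the original module): both
# timestamp variants produce timezone-aware UTC datetimes:
#
#     parse_timestamp('2015-10-07T21:15:00')         # -> datetime(2015, 10, 7, 21, 15, tzinfo=timezone.utc)
#     parse_timestamp('2015-10-07T21:15:00.500000')  # -> same, plus microsecond=500000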

def update_excludes(args):
    """Merge exclude patterns from files with those on command line.
    Empty lines and lines starting with '#' are ignored, but whitespace
    is not stripped."""
    if hasattr(args, 'exclude_files') and args.exclude_files:
        if not hasattr(args, 'excludes') or args.excludes is None:
            args.excludes = []
        for file in args.exclude_files:
            patterns = [line.rstrip('\r\n') for line in file if not line.startswith('#')]
            args.excludes += [ExcludePattern(pattern) for pattern in patterns if pattern]
            file.close()


def adjust_patterns(paths, excludes):
    if paths:
        return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
    else:
        return excludes


def exclude_path(path, patterns):
    """Used by create and extract sub-commands to determine
    whether or not an item should be processed.
    """
    for pattern in (patterns or []):
        if pattern.match(path):
            return isinstance(pattern, ExcludePattern)
    return False


# For both IncludePattern and ExcludePattern, we require that
# the pattern either match the whole path or an initial segment
# of the path up to but not including a path separator. To
# unify the two cases, we add a path separator to the end of
# the path before matching.

def normalized(func):
    """ Decorator for the Pattern match methods, returning a wrapper that
    normalizes OSX paths to match the normalized pattern on OSX, and
    returning the original method on other platforms"""
    @wraps(func)
    def normalize_wrapper(self, path):
        return func(self, unicodedata.normalize("NFD", path))

    if sys.platform in ('darwin',):
        # HFS+ converts paths to a canonical form, so users shouldn't be
        # required to enter an exact match
        return normalize_wrapper
    else:
        # Windows and Unix filesystems allow different forms, so users
        # always have to enter an exact match
        return func

class IncludePattern:
    """Literal files or directories listed on the command line
    for some operations (e.g. extract, but not create).
    If a directory is specified, all paths that start with that
    path match as well. A trailing slash makes no difference.
    """
    def __init__(self, pattern):
        self.pattern_orig = pattern
        self.match_count = 0

        if sys.platform in ('darwin',):
            pattern = unicodedata.normalize("NFD", pattern)

        self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep

    @normalized
    def match(self, path):
        matches = (path+os.path.sep).startswith(self.pattern)
        if matches:
            self.match_count += 1
        return matches

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)

    def __str__(self):
        return self.pattern_orig


class ExcludePattern(IncludePattern):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    def __init__(self, pattern):
        self.pattern_orig = pattern
        self.match_count = 0

        if pattern.endswith(os.path.sep):
            self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep+'*'+os.path.sep
        else:
            self.pattern = os.path.normpath(pattern)+os.path.sep+'*'

        if sys.platform in ('darwin',):
            self.pattern = unicodedata.normalize("NFD", self.pattern)

        # fnmatch and re.match both cache compiled regular expressions.
        # Nevertheless, this is about 10 times faster.
        self.regex = re.compile(translate(self.pattern))

    @normalized
    def match(self, path):
        matches = self.regex.match(path+os.path.sep) is not None
        if matches:
            self.match_count += 1
        return matches

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)

    def __str__(self):
        return self.pattern_orig
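
# Illustrative example (comment only, not part of the original module):
#
#     inc = IncludePattern('/home/user')
#     inc.match('/home/user/.bashrc')    # True  (path lies below the included directory)
#     inc.match('/home/username')        # False (only whole path components match)
#
#     exc = ExcludePattern('/home/*/junk')
#     exc.match('/home/user/junk/file')  # True
#
#     # exclude_path() returns True only if the first matching pattern is an ExcludePattern;
#     # here inc matches first, so the item is not excluded:
#     exclude_path('/home/user/junk/file', [inc, exc])  # False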

def timestamp(s):
    """Convert a --timestamp=s argument to a datetime object"""
    try:
        # is it pointing to a file / directory?
        ts = os.stat(s).st_mtime
        return datetime.utcfromtimestamp(ts)
    except OSError:
        # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
        for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
                       '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
                       '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
                       '%Y-%m-%d', '%Y-%j',
                       ):
            try:
                return datetime.strptime(s, format)
            except ValueError:
                continue
        raise ValueError


def ChunkerParams(s):
    chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
    if int(chunk_max) > 23:
        # do not go beyond 2**23 (8MB) chunk size now,
        # COMPR_BUFFER can only cope with up to this size
        raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
    return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
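
# Illustrative example (comment only, not part of the original module): the four
# comma-separated integers are, roughly, the min/max chunk size exponents, the
# chunker hash mask bits and the rolling hash window size:
#
#     ChunkerParams('10,23,16,4095')  # -> (10, 23, 16, 4095)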

def CompressionSpec(s):
    values = s.split(',')
    count = len(values)
    if count < 1:
        raise ValueError
    compression = values[0]
    try:
        compression = int(compression)
        if count > 1:
            raise ValueError
        # DEPRECATED: it is just --compression N
        if 0 <= compression <= 9:
            print('Warning: --compression %d is deprecated, please use --compression zlib,%d.' % (compression, compression))
            if compression == 0:
                print('Hint: instead of --compression zlib,0 you could also use --compression none for better performance.')
                print('Hint: archives generated using --compression none are not compatible with borg < 0.25.0.')
            return dict(name='zlib', level=compression)
        raise ValueError
    except ValueError:
        # --compression algo[,...]
        name = compression
        if name in ('none', 'lz4', ):
            return dict(name=name)
        if name in ('zlib', 'lzma', ):
            if count < 2:
                level = 6  # default compression level in py stdlib
            elif count == 2:
                level = int(values[1])
                if not 0 <= level <= 9:
                    raise ValueError
            else:
                raise ValueError
            return dict(name=name, level=level)
        raise ValueError
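
# Illustrative example (comment only, not part of the original module):
#
#     CompressionSpec('none')    # -> {'name': 'none'}
#     CompressionSpec('zlib,6')  # -> {'name': 'zlib', 'level': 6}
#     CompressionSpec('5')       # deprecated form -> {'name': 'zlib', 'level': 5}, with a warning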

def dir_is_cachedir(path):
    """Determines whether the specified path is a cache directory (and
    therefore should potentially be excluded from the backup) according to
    the CACHEDIR.TAG protocol
    (http://www.brynosaurus.com/cachedir/spec.html).
    """
    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
    tag_path = os.path.join(path, 'CACHEDIR.TAG')
    try:
        if os.path.exists(tag_path):
            with open(tag_path, 'rb') as tag_file:
                tag_data = tag_file.read(len(tag_contents))
                if tag_data == tag_contents:
                    return True
    except OSError:
        pass
    return False


def dir_is_tagged(path, exclude_caches, exclude_if_present):
    """Determines whether the specified path is excluded by being a cache
    directory or containing user-specified tag files. Returns a list of the
    paths of the tag files (either CACHEDIR.TAG or the matching
    user-specified files).
    """
    tag_paths = []
    if exclude_caches and dir_is_cachedir(path):
        tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
    if exclude_if_present is not None:
        for tag in exclude_if_present:
            tag_path = os.path.join(path, tag)
            if os.path.isfile(tag_path):
                tag_paths.append(tag_path)
    return tag_paths

def format_time(t):
    """use ISO-8601 date and time format
    """
    return t.strftime('%Y-%m-%d %H:%M:%S')


def format_timedelta(td):
    """Format timedelta in a human friendly format
    """
    # Since td.total_seconds() requires python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt


def format_file_mode(mod):
    """Format file mode bits for list output
    """
    def x(v):
        return ''.join(v & m and s or '-'
                       for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
    return '%s%s%s' % (x(mod // 64), x(mod // 8), x(mod))
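
# Illustrative example (comment only, not part of the original module):
#
#     format_file_mode(0o755)  # -> 'rwxr-xr-x'
#     format_file_mode(0o640)  # -> 'rw-r-----'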

def format_file_size(v, precision=2):
    """Format file size into a human friendly format
    """
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision)


def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2):
    for unit in units[:-1]:
        if abs(round(num, precision)) < power:
            if isinstance(num, int):
                return "{}{}{}{}".format(num, sep, unit, suffix)
            else:
                return "{:3.{}f}{}{}{}".format(num, precision, sep, unit, suffix)
        num /= float(power)
    return "{:.{}f}{}{}{}".format(num, precision, sep, units[-1], suffix)


def sizeof_fmt_iec(num, suffix='B', sep='', precision=2):
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)


def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2):
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
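
# Illustrative example (comment only, not part of the original module):
#
#     format_file_size(1234567)  # -> '1.23 MB'   (decimal, powers of 1000)
#     sizeof_fmt_iec(1234567)    # -> '1.18MiB'   (binary, powers of 1024)
#     format_file_size(500)      # -> '500 B'     (ints below the first power stay exact)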

def format_archive(archive):
    return '%-36s %s' % (archive.name, format_time(to_localtime(archive.ts)))


def memoize(function):
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function


@memoize
def uid2user(uid, default=None):
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return default


@memoize
def user2uid(user, default=None):
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return default


@memoize
def gid2group(gid, default=None):
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return default


@memoize
def group2gid(group, default=None):
    try:
        return group and grp.getgrnam(group).gr_gid
    except KeyError:
        return default

def posix_acl_use_stored_uid_gid(acl):
    """Replace the user/group field with the stored uid/gid
    """
    entries = []
    for entry in safe_decode(acl).split('\n'):
        if entry:
            fields = entry.split(':')
            if len(fields) == 4:
                entries.append(':'.join([fields[0], fields[3], fields[2]]))
            else:
                entries.append(entry)
    return safe_encode('\n'.join(entries))


def safe_decode(s, coding='utf-8', errors='surrogateescape'):
    """decode bytes to str, with round-tripping "invalid" bytes"""
    return s.decode(coding, errors)


def safe_encode(s, coding='utf-8', errors='surrogateescape'):
    """encode str to bytes, with round-tripping "invalid" bytes"""
    return s.encode(coding, errors)

class Location:
    """Object representing a repository / archive location
    """
    proto = user = host = port = path = archive = None
    # borg mount's FUSE filesystem creates one level of directories from
    # the archive names. Thus, we must not accept "/" in archive names.
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # get the repo from the BORG_REPO env var and the optional archive from the param.
    # if the syntax requires giving REPOSITORY (see "borg mount"),
    # use "::" to let it use the env var.
    # if REPOSITORY argument is optional, it'll automatically use the env.
    env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')

    def __init__(self, text=''):
        self.orig = text
        if not self.parse(self.orig):
            raise ValueError

    def parse(self, text):
        valid = self._parse(text)
        if valid:
            return True
        m = self.env_re.match(text)
        if not m:
            return False
        repo = os.environ.get('BORG_REPO')
        if repo is None:
            return False
        valid = self._parse(repo)
        if not valid:
            return False
        self.archive = m.group('archive')
        return True

    def _parse(self, text):
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = m.group('path')
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = m.group('path')
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            return True
        return False

    def __str__(self):
        items = [
            'proto=%r' % self.proto,
            'user=%r' % self.user,
            'host=%r' % self.host,
            'port=%r' % self.port,
            'path=%r' % self.path,
            'archive=%r' % self.archive,
        ]
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub(r'[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    def canonical_path(self):
        if self.proto == 'file':
            return self.path
        else:
            if self.path and self.path.startswith('~'):
                path = '/' + self.path
            elif self.path and not self.path.startswith('/'):
                path = '/~/' + self.path
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self.host,
                                           ':{}'.format(self.port) if self.port else '',
                                           path)
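
# Illustrative example (comment only, not part of the original module):
#
#     loc = Location('ssh://user@host:2222/srv/repo::archive1')
#     # -> proto='ssh', user='user', host='host', port=2222, path='/srv/repo', archive='archive1'
#     loc.canonical_path()    # -> 'ssh://user@host:2222/srv/repo'
#
#     Location('/srv/repo')   # matched by scp_re, proto='file', no archive
#     Location('::archive1')  # repository taken from the BORG_REPO environment variable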

def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
        return loc
    return validator


def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
    for key in keys:
        if isinstance(d.get(key), bytes):
            d[key] = d[key].decode(encoding, errors)
    return d


def remove_surrogates(s, errors='replace'):
    """Replace surrogates generated by fsdecode with '?'
    """
    return s.encode('utf-8', errors).decode('utf-8')


_safe_re = re.compile(r'^((\.\.)?/+)+')


def make_path_safe(path):
    """Make path safe by making it relative and local
    """
    return _safe_re.sub('', path) or '.'
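
# Illustrative example (comment only, not part of the original module): leading
# slashes and leading "../" sequences are stripped, so extracted items stay
# below the current directory:
#
#     make_path_safe('/etc/passwd')       # -> 'etc/passwd'
#     make_path_safe('../../etc/passwd')  # -> 'etc/passwd'
#     make_path_safe('/')                 # -> '.'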

def daemonize():
    """Detach process from controlling terminal and run in background
    """
    pid = os.fork()
    if pid:
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        os._exit(0)
    os.chdir('/')
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)


class StableDict(dict):
    """A dict subclass with stable items() ordering"""
    def items(self):
        return sorted(super().items())


if sys.version < '3.3':
    # st_xtime_ns attributes only available in 3.3+
    def st_atime_ns(st):
        return int(st.st_atime * 1e9)

    def st_ctime_ns(st):
        return int(st.st_ctime * 1e9)

    def st_mtime_ns(st):
        return int(st.st_mtime * 1e9)

    # unhexlify in < 3.3 incorrectly only accepts bytes input
    def unhexlify(data):
        if isinstance(data, str):
            data = data.encode('ascii')
        return binascii.unhexlify(data)
else:
    def st_atime_ns(st):
        return st.st_atime_ns

    def st_ctime_ns(st):
        return st.st_ctime_ns

    def st_mtime_ns(st):
        return st.st_mtime_ns

    unhexlify = binascii.unhexlify

def bigint_to_int(mtime):
    """Convert bytearray to int
    """
    if isinstance(mtime, bytes):
        return int.from_bytes(mtime, 'little', signed=True)
    return mtime


def int_to_bigint(value):
    """Convert integers larger than 64 bits to bytearray

    Smaller integers are left alone
    """
    if value.bit_length() > 63:
        return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
    return value
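
# Illustrative example (comment only, not part of the original module): values
# that do not fit into a signed 64-bit int are stored as little-endian bytes
# and converted back transparently:
#
#     int_to_bigint(2**63)                 # -> b'\x00\x00\x00\x00\x00\x00\x00\x80\x00' (9 bytes)
#     bigint_to_int(int_to_bigint(2**63))  # -> 9223372036854775808
#     int_to_bigint(1000)                  # -> 1000 (small ints are left alone)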

def is_slow_msgpack():
    return msgpack.Packer is msgpack.fallback.Packer


def yes(msg=None, retry_msg=None, false_msg=None, true_msg=None,
        default=False, default_notty=None, default_eof=None,
        falsish=('No', 'no', 'N', 'n'), truish=('Yes', 'yes', 'Y', 'y'),
        env_var_override=None, ifile=None, ofile=None, input=input):
    """
    Output <msg> (usually a question) and let user input an answer.
    Qualifies the answer according to falsish and truish as True or False.
    If it didn't qualify and retry_msg is None (no retries wanted),
    return the default [which defaults to False]. Otherwise let user retry
    answering until answer is qualified.

    If env_var_override is given and it is non-empty, counts as truish answer
    and won't ask user for an answer.
    If we don't have a tty as input and default_notty is not None, return its value.
    Otherwise read input from non-tty and proceed as normal.
    If EOF is received instead of an input, return default_eof [or default, if not given].

    :param msg: introducing message to output on ofile, no \n is added [None]
    :param retry_msg: retry message to output on ofile, no \n is added [None]
           (also enforces retries instead of returning default)
    :param false_msg: message to output before returning False [None]
    :param true_msg: message to output before returning True [None]
    :param default: default return value (empty answer is given) [False]
    :param default_notty: if not None, return its value if no tty is connected [None]
    :param default_eof: return value if EOF was read as answer [same as default]
    :param falsish: sequence of answers qualifying as False
    :param truish: sequence of answers qualifying as True
    :param env_var_override: environment variable name [None]
    :param ifile: input stream [sys.stdin] (only for testing!)
    :param ofile: output stream [sys.stderr]
    :param input: input function [input from builtins]
    :return: boolean answer value, True or False
    """
    # note: we do not assign sys.stdin/stderr as defaults above, so they are
    # really evaluated NOW, not at function definition time.
    if ifile is None:
        ifile = sys.stdin
    if ofile is None:
        ofile = sys.stderr
    if default not in (True, False):
        raise ValueError("invalid default value, must be True or False")
    if default_notty not in (None, True, False):
        raise ValueError("invalid default_notty value, must be None, True or False")
    if default_eof not in (None, True, False):
        raise ValueError("invalid default_eof value, must be None, True or False")
    if msg:
        print(msg, file=ofile, end='')
        ofile.flush()
    if env_var_override:
        value = os.environ.get(env_var_override)
        # currently, any non-empty value counts as truish
        # TODO: change this so one can give y/n there?
        if value:
            value = bool(value)
            value_str = truish[0] if value else falsish[0]
            print("{} (from {})".format(value_str, env_var_override), file=ofile)
            return value
    if default_notty is not None and not ifile.isatty():
        # looks like ifile is not a terminal (but e.g. a pipe)
        return default_notty
    while True:
        try:
            answer = input()  # XXX how can we use ifile?
        except EOFError:
            return default_eof if default_eof is not None else default
        if answer in truish:
            if true_msg:
                print(true_msg, file=ofile)
            return True
        if answer in falsish:
            if false_msg:
                print(false_msg, file=ofile)
            return False
        if retry_msg is None:
            # no retries wanted, we just return the default
            return default
        if retry_msg:
            print(retry_msg, file=ofile, end='')
            ofile.flush()
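
# Illustrative example (comment only, not part of the original module; the
# environment variable name below is made up for the example):
#
#     if yes('Really delete the repository? ', false_msg='Aborting.',
#            env_var_override='DEMO_ASSUME_YES'):
#         ...
#
# Any non-empty value in DEMO_ASSUME_YES counts as a "yes" and skips the
# prompt; otherwise an answer is read interactively and matched against
# `truish` / `falsish`.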

class ProgressIndicatorPercent:
    def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=sys.stderr):
        """
        Percentage-based progress indicator

        :param total: total amount of items
        :param step: step size in percent
        :param start: at which percent value to start
        :param same_line: if True, emit output always on same line
        :param msg: output message, must contain one %f placeholder for the percentage
        :param file: output file, default: sys.stderr
        """
        self.counter = 0  # 0 .. (total-1)
        self.total = total
        self.trigger_at = start  # output next percentage value when reaching (at least) this
        self.step = step
        self.file = file
        self.msg = msg
        self.same_line = same_line

    def progress(self, current=None):
        if current is not None:
            self.counter = current
        pct = self.counter * 100 / self.total
        self.counter += 1
        if pct >= self.trigger_at:
            self.trigger_at += self.step
            return pct

    def show(self, current=None):
        pct = self.progress(current)
        if pct is not None:
            return self.output(pct)

    def output(self, percent):
        print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n')  # python 3.3 gives us flush=True
        self.file.flush()

    def finish(self):
        if self.same_line:
            print(" " * len(self.msg % 100.0), file=self.file, end='\r')
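
# Illustrative example (comment only, not part of the original module):
#
#     pi = ProgressIndicatorPercent(1000, step=10, same_line=True, msg='%3.0f%% extracted')
#     for i in range(1000):
#         pi.show(i)   # prints at 0%, 10%, 20%, ... on one line
#     pi.finish()      # blanks the line when same_line is used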

class ProgressIndicatorEndless:
    def __init__(self, step=10, file=sys.stderr):
        """
        Progress indicator (long row of dots)

        :param step: every Nth call, call the func
        :param file: output file, default: sys.stderr
        """
        self.counter = 0  # call counter
        self.triggered = 0  # increases 1 per trigger event
        self.step = step  # trigger every <step> calls
        self.file = file

    def progress(self):
        self.counter += 1
        trigger = self.counter % self.step == 0
        if trigger:
            self.triggered += 1
        return trigger

    def show(self):
        trigger = self.progress()
        if trigger:
            return self.output(self.triggered)

    def output(self, triggered):
        print('.', end='', file=self.file)  # python 3.3 gives us flush=True
        self.file.flush()

    def finish(self):
        print(file=self.file)

def sysinfo():
    info = []
    info.append('Platform: %s' % (' '.join(platform.uname()), ))
    if sys.platform.startswith('linux'):
        info.append('Linux: %s %s %s LibC: %s %s' % (platform.linux_distribution() + platform.libc_ver()))
    info.append('Python: %s %s' % (platform.python_implementation(), platform.python_version()))
    info.append('')
    return '\n'.join(info)