helpers.py 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973
  1. from .support import argparse # see support/__init__.py docstring, DEPRECATED - remove after requiring py 3.4
  2. import binascii
  3. from collections import namedtuple
  4. from functools import wraps
  5. import grp
  6. import os
  7. import pwd
  8. import re
  9. try:
  10. from shutil import get_terminal_size
  11. except ImportError:
  12. def get_terminal_size(fallback=(80, 24)):
  13. TerminalSize = namedtuple('TerminalSize', ['columns', 'lines'])
  14. return TerminalSize(int(os.environ.get('COLUMNS', fallback[0])), int(os.environ.get('LINES', fallback[1])))
  15. import sys
  16. import platform
  17. import time
  18. import unicodedata
  19. from datetime import datetime, timezone, timedelta
  20. from fnmatch import translate
  21. from operator import attrgetter
  22. from . import hashindex
  23. from . import chunker
  24. from . import crypto
  25. import msgpack
  26. import msgpack.fallback
  27. # return codes returned by borg command
  28. # when borg is killed by signal N, rc = 128 + N
  29. EXIT_SUCCESS = 0 # everything done, no problems
  30. EXIT_WARNING = 1 # reached normal end of operation, but there were issues
  31. EXIT_ERROR = 2 # terminated abruptly, did not reach end of operation
  32. class Error(Exception):
  33. """Error base class"""
  34. # if we raise such an Error and it is only catched by the uppermost
  35. # exception handler (that exits short after with the given exit_code),
  36. # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
  37. exit_code = EXIT_ERROR
  38. # show a traceback?
  39. traceback = False
  40. def get_message(self):
  41. return type(self).__doc__.format(*self.args)
  42. class ErrorWithTraceback(Error):
  43. """like Error, but show a traceback also"""
  44. traceback = True
  45. class IntegrityError(ErrorWithTraceback):
  46. """Data integrity error"""
  47. class ExtensionModuleError(Error):
  48. """The Borg binary extension modules do not seem to be properly installed"""
  49. def check_extension_modules():
  50. from . import platform
  51. if hashindex.API_VERSION != 2:
  52. raise ExtensionModuleError
  53. if chunker.API_VERSION != 2:
  54. raise ExtensionModuleError
  55. if crypto.API_VERSION != 2:
  56. raise ExtensionModuleError
  57. if platform.API_VERSION != 2:
  58. raise ExtensionModuleError
  59. class Manifest:
  60. MANIFEST_ID = b'\0' * 32
  61. def __init__(self, key, repository):
  62. self.archives = {}
  63. self.config = {}
  64. self.key = key
  65. self.repository = repository
  66. @classmethod
  67. def load(cls, repository, key=None):
  68. from .key import key_factory
  69. cdata = repository.get(cls.MANIFEST_ID)
  70. if not key:
  71. key = key_factory(repository, cdata)
  72. manifest = cls(key, repository)
  73. data = key.decrypt(None, cdata)
  74. manifest.id = key.id_hash(data)
  75. m = msgpack.unpackb(data)
  76. if not m.get(b'version') == 1:
  77. raise ValueError('Invalid manifest version')
  78. manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
  79. manifest.timestamp = m.get(b'timestamp')
  80. if manifest.timestamp:
  81. manifest.timestamp = manifest.timestamp.decode('ascii')
  82. manifest.config = m[b'config']
  83. return manifest, key
  84. def write(self):
  85. self.timestamp = datetime.utcnow().isoformat()
  86. data = msgpack.packb(StableDict({
  87. 'version': 1,
  88. 'archives': self.archives,
  89. 'timestamp': self.timestamp,
  90. 'config': self.config,
  91. }))
  92. self.id = self.key.id_hash(data)
  93. self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))
  94. def list_archive_infos(self, sort_by=None, reverse=False):
  95. # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
  96. ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
  97. archives = []
  98. for name, values in self.archives.items():
  99. ts = parse_timestamp(values[b'time'].decode('utf-8'))
  100. id = values[b'id']
  101. archives.append(ArchiveInfo(name=name, id=id, ts=ts))
  102. if sort_by is not None:
  103. archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
  104. return archives
  105. def prune_within(archives, within):
  106. multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
  107. try:
  108. hours = int(within[:-1]) * multiplier[within[-1]]
  109. except (KeyError, ValueError):
  110. # I don't like how this displays the original exception too:
  111. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  112. if hours <= 0:
  113. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  114. target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
  115. return [a for a in archives if a.ts > target]
  116. def prune_split(archives, pattern, n, skip=[]):
  117. last = None
  118. keep = []
  119. if n == 0:
  120. return keep
  121. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  122. period = to_localtime(a.ts).strftime(pattern)
  123. if period != last:
  124. last = period
  125. if a not in skip:
  126. keep.append(a)
  127. if len(keep) == n:
  128. break
  129. return keep
  130. class Statistics:
  131. def __init__(self):
  132. self.osize = self.csize = self.usize = self.nfiles = 0
  133. self.last_progress = 0 # timestamp when last progress was shown
  134. def update(self, size, csize, unique):
  135. self.osize += size
  136. self.csize += csize
  137. if unique:
  138. self.usize += csize
  139. summary = """\
  140. Original size Compressed size Deduplicated size
  141. {label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""
  142. def __str__(self):
  143. return self.summary.format(stats=self, label='This archive:')
  144. def __repr__(self):
  145. return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)
  146. @property
  147. def osize_fmt(self):
  148. return format_file_size(self.osize)
  149. @property
  150. def usize_fmt(self):
  151. return format_file_size(self.usize)
  152. @property
  153. def csize_fmt(self):
  154. return format_file_size(self.csize)
  155. def show_progress(self, item=None, final=False, stream=None, dt=None):
  156. now = time.time()
  157. if dt is None or now - self.last_progress > dt:
  158. self.last_progress = now
  159. columns, lines = get_terminal_size()
  160. if not final:
  161. msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
  162. path = remove_surrogates(item[b'path']) if item else ''
  163. space = columns - len(msg)
  164. if space < len('...') + len(path):
  165. path = '%s...%s' % (path[:(space//2)-len('...')], path[-space//2:])
  166. msg += "{0:<{space}}".format(path, space=space)
  167. else:
  168. msg = ' ' * columns
  169. print(msg, file=stream or sys.stderr, end="\r")
  170. (stream or sys.stderr).flush()
  171. def get_keys_dir():
  172. """Determine where to repository keys and cache"""
  173. return os.environ.get('BORG_KEYS_DIR',
  174. os.path.join(os.path.expanduser('~'), '.borg', 'keys'))
  175. def get_cache_dir():
  176. """Determine where to repository keys and cache"""
  177. xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
  178. return os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
  179. def to_localtime(ts):
  180. """Convert datetime object from UTC to local time zone"""
  181. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  182. def parse_timestamp(timestamp):
  183. """Parse a ISO 8601 timestamp string"""
  184. if '.' in timestamp: # microseconds might not be present
  185. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  186. else:
  187. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
  188. def load_excludes(fh):
  189. """Load and parse exclude patterns from file object. Empty lines and lines starting with '#' are ignored, but
  190. whitespace is not stripped.
  191. """
  192. patterns = (line.rstrip('\r\n') for line in fh if not line.startswith('#'))
  193. return [ExcludePattern(pattern) for pattern in patterns if pattern]
  194. def update_excludes(args):
  195. """Merge exclude patterns from files with those on command line."""
  196. if hasattr(args, 'exclude_files') and args.exclude_files:
  197. if not hasattr(args, 'excludes') or args.excludes is None:
  198. args.excludes = []
  199. for file in args.exclude_files:
  200. args.excludes += load_excludes(file)
  201. file.close()
  202. def adjust_patterns(paths, excludes):
  203. if paths:
  204. return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
  205. else:
  206. return excludes
  207. def exclude_path(path, patterns):
  208. """Used by create and extract sub-commands to determine
  209. whether or not an item should be processed.
  210. """
  211. for pattern in (patterns or []):
  212. if pattern.match(path):
  213. return isinstance(pattern, ExcludePattern)
  214. return False
  215. # For both IncludePattern and ExcludePattern, we require that
  216. # the pattern either match the whole path or an initial segment
  217. # of the path up to but not including a path separator. To
  218. # unify the two cases, we add a path separator to the end of
  219. # the path before matching.
  220. def normalized(func):
  221. """ Decorator for the Pattern match methods, returning a wrapper that
  222. normalizes OSX paths to match the normalized pattern on OSX, and
  223. returning the original method on other platforms"""
  224. @wraps(func)
  225. def normalize_wrapper(self, path):
  226. return func(self, unicodedata.normalize("NFD", path))
  227. if sys.platform in ('darwin',):
  228. # HFS+ converts paths to a canonical form, so users shouldn't be
  229. # required to enter an exact match
  230. return normalize_wrapper
  231. else:
  232. # Windows and Unix filesystems allow different forms, so users
  233. # always have to enter an exact match
  234. return func
  235. class IncludePattern:
  236. """Literal files or directories listed on the command line
  237. for some operations (e.g. extract, but not create).
  238. If a directory is specified, all paths that start with that
  239. path match as well. A trailing slash makes no difference.
  240. """
  241. def __init__(self, pattern):
  242. self.pattern_orig = pattern
  243. self.match_count = 0
  244. if sys.platform in ('darwin',):
  245. pattern = unicodedata.normalize("NFD", pattern)
  246. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep
  247. @normalized
  248. def match(self, path):
  249. matches = (path+os.path.sep).startswith(self.pattern)
  250. if matches:
  251. self.match_count += 1
  252. return matches
  253. def __repr__(self):
  254. return '%s(%s)' % (type(self), self.pattern)
  255. def __str__(self):
  256. return self.pattern_orig
  257. class ExcludePattern(IncludePattern):
  258. """Shell glob patterns to exclude. A trailing slash means to
  259. exclude the contents of a directory, but not the directory itself.
  260. """
  261. def __init__(self, pattern):
  262. self.pattern_orig = pattern
  263. self.match_count = 0
  264. if pattern.endswith(os.path.sep):
  265. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep+'*'+os.path.sep
  266. else:
  267. self.pattern = os.path.normpath(pattern)+os.path.sep+'*'
  268. if sys.platform in ('darwin',):
  269. self.pattern = unicodedata.normalize("NFD", self.pattern)
  270. # fnmatch and re.match both cache compiled regular expressions.
  271. # Nevertheless, this is about 10 times faster.
  272. self.regex = re.compile(translate(self.pattern))
  273. @normalized
  274. def match(self, path):
  275. matches = self.regex.match(path+os.path.sep) is not None
  276. if matches:
  277. self.match_count += 1
  278. return matches
  279. def __repr__(self):
  280. return '%s(%s)' % (type(self), self.pattern)
  281. def __str__(self):
  282. return self.pattern_orig
  283. def timestamp(s):
  284. """Convert a --timestamp=s argument to a datetime object"""
  285. try:
  286. # is it pointing to a file / directory?
  287. ts = os.stat(s).st_mtime
  288. return datetime.utcfromtimestamp(ts)
  289. except OSError:
  290. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  291. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  292. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  293. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  294. '%Y-%m-%d', '%Y-%j',
  295. ):
  296. try:
  297. return datetime.strptime(s, format)
  298. except ValueError:
  299. continue
  300. raise ValueError
  301. def ChunkerParams(s):
  302. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  303. if int(chunk_max) > 23:
  304. # do not go beyond 2**23 (8MB) chunk size now,
  305. # COMPR_BUFFER can only cope with up to this size
  306. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  307. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  308. def CompressionSpec(s):
  309. values = s.split(',')
  310. count = len(values)
  311. if count < 1:
  312. raise ValueError
  313. compression = values[0]
  314. try:
  315. compression = int(compression)
  316. if count > 1:
  317. raise ValueError
  318. # DEPRECATED: it is just --compression N
  319. if 0 <= compression <= 9:
  320. print('Warning: --compression %d is deprecated, please use --compression zlib,%d.' % (compression, compression))
  321. if compression == 0:
  322. print('Hint: instead of --compression zlib,0 you could also use --compression none for better performance.')
  323. print('Hint: archives generated using --compression none are not compatible with borg < 0.25.0.')
  324. return dict(name='zlib', level=compression)
  325. raise ValueError
  326. except ValueError:
  327. # --compression algo[,...]
  328. name = compression
  329. if name in ('none', 'lz4', ):
  330. return dict(name=name)
  331. if name in ('zlib', 'lzma', ):
  332. if count < 2:
  333. level = 6 # default compression level in py stdlib
  334. elif count == 2:
  335. level = int(values[1])
  336. if not 0 <= level <= 9:
  337. raise ValueError
  338. else:
  339. raise ValueError
  340. return dict(name=name, level=level)
  341. raise ValueError
  342. def dir_is_cachedir(path):
  343. """Determines whether the specified path is a cache directory (and
  344. therefore should potentially be excluded from the backup) according to
  345. the CACHEDIR.TAG protocol
  346. (http://www.brynosaurus.com/cachedir/spec.html).
  347. """
  348. tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
  349. tag_path = os.path.join(path, 'CACHEDIR.TAG')
  350. try:
  351. if os.path.exists(tag_path):
  352. with open(tag_path, 'rb') as tag_file:
  353. tag_data = tag_file.read(len(tag_contents))
  354. if tag_data == tag_contents:
  355. return True
  356. except OSError:
  357. pass
  358. return False
  359. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  360. """Determines whether the specified path is excluded by being a cache
  361. directory or containing user-specified tag files. Returns a list of the
  362. paths of the tag files (either CACHEDIR.TAG or the matching
  363. user-specified files).
  364. """
  365. tag_paths = []
  366. if exclude_caches and dir_is_cachedir(path):
  367. tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
  368. if exclude_if_present is not None:
  369. for tag in exclude_if_present:
  370. tag_path = os.path.join(path, tag)
  371. if os.path.isfile(tag_path):
  372. tag_paths.append(tag_path)
  373. return tag_paths
  374. def format_time(t):
  375. """use ISO-8601 date and time format
  376. """
  377. return t.strftime('%Y-%m-%d %H:%M:%S')
  378. def format_timedelta(td):
  379. """Format timedelta in a human friendly format
  380. """
  381. # Since td.total_seconds() requires python 2.7
  382. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  383. s = ts % 60
  384. m = int(ts / 60) % 60
  385. h = int(ts / 3600) % 24
  386. txt = '%.2f seconds' % s
  387. if m:
  388. txt = '%d minutes %s' % (m, txt)
  389. if h:
  390. txt = '%d hours %s' % (h, txt)
  391. if td.days:
  392. txt = '%d days %s' % (td.days, txt)
  393. return txt
  394. def format_file_mode(mod):
  395. """Format file mode bits for list output
  396. """
  397. def x(v):
  398. return ''.join(v & m and s or '-'
  399. for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
  400. return '%s%s%s' % (x(mod // 64), x(mod // 8), x(mod))
  401. def format_file_size(v, precision=2):
  402. """Format file size into a human friendly format
  403. """
  404. return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision)
  405. def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2):
  406. for unit in units[:-1]:
  407. if abs(round(num, precision)) < power:
  408. if isinstance(num, int):
  409. return "{}{}{}{}".format(num, sep, unit, suffix)
  410. else:
  411. return "{:3.{}f}{}{}{}".format(num, precision, sep, unit, suffix)
  412. num /= float(power)
  413. return "{:.{}f}{}{}{}".format(num, precision, sep, units[-1], suffix)
  414. def sizeof_fmt_iec(num, suffix='B', sep='', precision=2):
  415. return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)
  416. def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2):
  417. return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
  418. def format_archive(archive):
  419. return '%-36s %s' % (archive.name, format_time(to_localtime(archive.ts)))
  420. def memoize(function):
  421. cache = {}
  422. def decorated_function(*args):
  423. try:
  424. return cache[args]
  425. except KeyError:
  426. val = function(*args)
  427. cache[args] = val
  428. return val
  429. return decorated_function
  430. @memoize
  431. def uid2user(uid, default=None):
  432. try:
  433. return pwd.getpwuid(uid).pw_name
  434. except KeyError:
  435. return default
  436. @memoize
  437. def user2uid(user, default=None):
  438. try:
  439. return user and pwd.getpwnam(user).pw_uid
  440. except KeyError:
  441. return default
  442. @memoize
  443. def gid2group(gid, default=None):
  444. try:
  445. return grp.getgrgid(gid).gr_name
  446. except KeyError:
  447. return default
  448. @memoize
  449. def group2gid(group, default=None):
  450. try:
  451. return group and grp.getgrnam(group).gr_gid
  452. except KeyError:
  453. return default
  454. def posix_acl_use_stored_uid_gid(acl):
  455. """Replace the user/group field with the stored uid/gid
  456. """
  457. entries = []
  458. for entry in safe_decode(acl).split('\n'):
  459. if entry:
  460. fields = entry.split(':')
  461. if len(fields) == 4:
  462. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  463. else:
  464. entries.append(entry)
  465. return safe_encode('\n'.join(entries))
  466. def safe_decode(s, coding='utf-8', errors='surrogateescape'):
  467. """decode bytes to str, with round-tripping "invalid" bytes"""
  468. return s.decode(coding, errors)
  469. def safe_encode(s, coding='utf-8', errors='surrogateescape'):
  470. """encode str to bytes, with round-tripping "invalid" bytes"""
  471. return s.encode(coding, errors)
  472. class Location:
  473. """Object representing a repository / archive location
  474. """
  475. proto = user = host = port = path = archive = None
  476. # borg mount's FUSE filesystem creates one level of directories from
  477. # the archive names. Thus, we must not accept "/" in archive names.
  478. ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
  479. r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
  480. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  481. file_re = re.compile(r'(?P<proto>file)://'
  482. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  483. scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
  484. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  485. # get the repo from BORG_RE env and the optional archive from param.
  486. # if the syntax requires giving REPOSITORY (see "borg mount"),
  487. # use "::" to let it use the env var.
  488. # if REPOSITORY argument is optional, it'll automatically use the env.
  489. env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')
  490. def __init__(self, text=''):
  491. self.orig = text
  492. if not self.parse(self.orig):
  493. raise ValueError
  494. def parse(self, text):
  495. valid = self._parse(text)
  496. if valid:
  497. return True
  498. m = self.env_re.match(text)
  499. if not m:
  500. return False
  501. repo = os.environ.get('BORG_REPO')
  502. if repo is None:
  503. return False
  504. valid = self._parse(repo)
  505. if not valid:
  506. return False
  507. self.archive = m.group('archive')
  508. return True
  509. def _parse(self, text):
  510. m = self.ssh_re.match(text)
  511. if m:
  512. self.proto = m.group('proto')
  513. self.user = m.group('user')
  514. self.host = m.group('host')
  515. self.port = m.group('port') and int(m.group('port')) or None
  516. self.path = m.group('path')
  517. self.archive = m.group('archive')
  518. return True
  519. m = self.file_re.match(text)
  520. if m:
  521. self.proto = m.group('proto')
  522. self.path = m.group('path')
  523. self.archive = m.group('archive')
  524. return True
  525. m = self.scp_re.match(text)
  526. if m:
  527. self.user = m.group('user')
  528. self.host = m.group('host')
  529. self.path = m.group('path')
  530. self.archive = m.group('archive')
  531. self.proto = self.host and 'ssh' or 'file'
  532. return True
  533. return False
  534. def __str__(self):
  535. items = [
  536. 'proto=%r' % self.proto,
  537. 'user=%r' % self.user,
  538. 'host=%r' % self.host,
  539. 'port=%r' % self.port,
  540. 'path=%r' % self.path,
  541. 'archive=%r' % self.archive,
  542. ]
  543. return ', '.join(items)
  544. def to_key_filename(self):
  545. name = re.sub('[^\w]', '_', self.path).strip('_')
  546. if self.proto != 'file':
  547. name = self.host + '__' + name
  548. return os.path.join(get_keys_dir(), name)
  549. def __repr__(self):
  550. return "Location(%s)" % self
  551. def canonical_path(self):
  552. if self.proto == 'file':
  553. return self.path
  554. else:
  555. if self.path and self.path.startswith('~'):
  556. path = '/' + self.path
  557. elif self.path and not self.path.startswith('/'):
  558. path = '/~/' + self.path
  559. else:
  560. path = self.path
  561. return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
  562. self.host,
  563. ':{}'.format(self.port) if self.port else '',
  564. path)
  565. def location_validator(archive=None):
  566. def validator(text):
  567. try:
  568. loc = Location(text)
  569. except ValueError:
  570. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
  571. if archive is True and not loc.archive:
  572. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  573. elif archive is False and loc.archive:
  574. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  575. return loc
  576. return validator
  577. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  578. for key in keys:
  579. if isinstance(d.get(key), bytes):
  580. d[key] = d[key].decode(encoding, errors)
  581. return d
  582. def remove_surrogates(s, errors='replace'):
  583. """Replace surrogates generated by fsdecode with '?'
  584. """
  585. return s.encode('utf-8', errors).decode('utf-8')
  586. _safe_re = re.compile(r'^((\.\.)?/+)+')
  587. def make_path_safe(path):
  588. """Make path safe by making it relative and local
  589. """
  590. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    # classic double-fork daemonization:
    pid = os.fork()  # first fork: parent returns to the shell
    if pid:
        os._exit(0)
    os.setsid()  # become session leader, drop the controlling terminal
    pid = os.fork()  # second fork: session leader exits, so we can never re-acquire a tty
    if pid:
        os._exit(0)
    os.chdir('/')  # don't keep any directory in use (allows unmounting)
    # redirect stdin/stdout/stderr to /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  609. class StableDict(dict):
  610. """A dict subclass with stable items() ordering"""
  611. def items(self):
  612. return sorted(super().items())
  613. if sys.version < '3.3':
  614. # st_xtime_ns attributes only available in 3.3+
  615. def st_atime_ns(st):
  616. return int(st.st_atime * 1e9)
  617. def st_ctime_ns(st):
  618. return int(st.st_ctime * 1e9)
  619. def st_mtime_ns(st):
  620. return int(st.st_mtime * 1e9)
  621. # unhexlify in < 3.3 incorrectly only accepts bytes input
  622. def unhexlify(data):
  623. if isinstance(data, str):
  624. data = data.encode('ascii')
  625. return binascii.unhexlify(data)
  626. else:
  627. def st_atime_ns(st):
  628. return st.st_atime_ns
  629. def st_ctime_ns(st):
  630. return st.st_ctime_ns
  631. def st_mtime_ns(st):
  632. return st.st_mtime_ns
  633. unhexlify = binascii.unhexlify
  634. def bigint_to_int(mtime):
  635. """Convert bytearray to int
  636. """
  637. if isinstance(mtime, bytes):
  638. return int.from_bytes(mtime, 'little', signed=True)
  639. return mtime
  640. def int_to_bigint(value):
  641. """Convert integers larger than 64 bits to bytearray
  642. Smaller integers are left alone
  643. """
  644. if value.bit_length() > 63:
  645. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  646. return value
  647. def is_slow_msgpack():
  648. return msgpack.Packer is msgpack.fallback.Packer
  649. def yes(msg=None, retry_msg=None, false_msg=None, true_msg=None,
  650. default=False, default_notty=None, default_eof=None,
  651. falsish=('No', 'no', 'N', 'n'), truish=('Yes', 'yes', 'Y', 'y'),
  652. env_var_override=None, ifile=None, ofile=None, input=input):
  653. """
  654. Output <msg> (usually a question) and let user input an answer.
  655. Qualifies the answer according to falsish and truish as True or False.
  656. If it didn't qualify and retry_msg is None (no retries wanted),
  657. return the default [which defaults to False]. Otherwise let user retry
  658. answering until answer is qualified.
  659. If env_var_override is given and it is non-empty, counts as truish answer
  660. and won't ask user for an answer.
  661. If we don't have a tty as input and default_notty is not None, return its value.
  662. Otherwise read input from non-tty and proceed as normal.
  663. If EOF is received instead an input, return default_eof [or default, if not given].
  664. :param msg: introducing message to output on ofile, no \n is added [None]
  665. :param retry_msg: retry message to output on ofile, no \n is added [None]
  666. (also enforces retries instead of returning default)
  667. :param false_msg: message to output before returning False [None]
  668. :param true_msg: message to output before returning True [None]
  669. :param default: default return value (empty answer is given) [False]
  670. :param default_notty: if not None, return its value if no tty is connected [None]
  671. :param default_eof: return value if EOF was read as answer [same as default]
  672. :param falsish: sequence of answers qualifying as False
  673. :param truish: sequence of answers qualifying as True
  674. :param env_var_override: environment variable name [None]
  675. :param ifile: input stream [sys.stdin] (only for testing!)
  676. :param ofile: output stream [sys.stderr]
  677. :param input: input function [input from builtins]
  678. :return: boolean answer value, True or False
  679. """
  680. # note: we do not assign sys.stdin/stderr as defaults above, so they are
  681. # really evaluated NOW, not at function definition time.
  682. if ifile is None:
  683. ifile = sys.stdin
  684. if ofile is None:
  685. ofile = sys.stderr
  686. if default not in (True, False):
  687. raise ValueError("invalid default value, must be True or False")
  688. if default_notty not in (None, True, False):
  689. raise ValueError("invalid default_notty value, must be None, True or False")
  690. if default_eof not in (None, True, False):
  691. raise ValueError("invalid default_eof value, must be None, True or False")
  692. if msg:
  693. print(msg, file=ofile, end='')
  694. ofile.flush()
  695. if env_var_override:
  696. value = os.environ.get(env_var_override)
  697. # currently, any non-empty value counts as truish
  698. # TODO: change this so one can give y/n there?
  699. if value:
  700. value = bool(value)
  701. value_str = truish[0] if value else falsish[0]
  702. print("{} (from {})".format(value_str, env_var_override), file=ofile)
  703. return value
  704. if default_notty is not None and not ifile.isatty():
  705. # looks like ifile is not a terminal (but e.g. a pipe)
  706. return default_notty
  707. while True:
  708. try:
  709. answer = input() # XXX how can we use ifile?
  710. except EOFError:
  711. return default_eof if default_eof is not None else default
  712. if answer in truish:
  713. if true_msg:
  714. print(true_msg, file=ofile)
  715. return True
  716. if answer in falsish:
  717. if false_msg:
  718. print(false_msg, file=ofile)
  719. return False
  720. if retry_msg is None:
  721. # no retries wanted, we just return the default
  722. return default
  723. if retry_msg:
  724. print(retry_msg, file=ofile, end='')
  725. ofile.flush()
  726. class ProgressIndicatorPercent:
  727. def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=sys.stderr):
  728. """
  729. Percentage-based progress indicator
  730. :param total: total amount of items
  731. :param step: step size in percent
  732. :param start: at which percent value to start
  733. :param same_line: if True, emit output always on same line
  734. :param msg: output message, must contain one %f placeholder for the percentage
  735. :param file: output file, default: sys.stderr
  736. """
  737. self.counter = 0 # 0 .. (total-1)
  738. self.total = total
  739. self.trigger_at = start # output next percentage value when reaching (at least) this
  740. self.step = step
  741. self.file = file
  742. self.msg = msg
  743. self.same_line = same_line
  744. def progress(self, current=None):
  745. if current is not None:
  746. self.counter = current
  747. pct = self.counter * 100 / self.total
  748. self.counter += 1
  749. if pct >= self.trigger_at:
  750. self.trigger_at += self.step
  751. return pct
  752. def show(self, current=None):
  753. pct = self.progress(current)
  754. if pct is not None:
  755. return self.output(pct)
  756. def output(self, percent):
  757. print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n') # python 3.3 gives us flush=True
  758. self.file.flush()
  759. def finish(self):
  760. if self.same_line:
  761. print(" " * len(self.msg % 100.0), file=self.file, end='\r')
  762. class ProgressIndicatorEndless:
  763. def __init__(self, step=10, file=sys.stderr):
  764. """
  765. Progress indicator (long row of dots)
  766. :param step: every Nth call, call the func
  767. :param file: output file, default: sys.stderr
  768. """
  769. self.counter = 0 # call counter
  770. self.triggered = 0 # increases 1 per trigger event
  771. self.step = step # trigger every <step> calls
  772. self.file = file
  773. def progress(self):
  774. self.counter += 1
  775. trigger = self.counter % self.step == 0
  776. if trigger:
  777. self.triggered += 1
  778. return trigger
  779. def show(self):
  780. trigger = self.progress()
  781. if trigger:
  782. return self.output(self.triggered)
  783. def output(self, triggered):
  784. print('.', end='', file=self.file) # python 3.3 gives us flush=True
  785. self.file.flush()
  786. def finish(self):
  787. print(file=self.file)
  788. def sysinfo():
  789. info = []
  790. info.append('Platform: %s' % (' '.join(platform.uname()), ))
  791. if sys.platform.startswith('linux'):
  792. info.append('Linux: %s %s %s LibC: %s %s' % (platform.linux_distribution() + platform.libc_ver()))
  793. info.append('Python: %s %s' % (platform.python_implementation(), platform.python_version()))
  794. info.append('')
  795. return '\n'.join(info)