helpers.py 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157
  1. import argparse
  2. from collections import namedtuple
  3. from functools import wraps
  4. import grp
  5. import os
  6. import stat
  7. import textwrap
  8. import pwd
  9. import re
  10. from shutil import get_terminal_size
  11. import sys
  12. import platform
  13. import threading
  14. import time
  15. import unicodedata
  16. import io
  17. import errno
  18. import logging
  19. from .logger import create_logger
  20. logger = create_logger()
  21. from datetime import datetime, timezone, timedelta
  22. from fnmatch import translate
  23. from operator import attrgetter
  24. from . import __version__ as borg_version
  25. from . import hashindex
  26. from . import chunker
  27. from . import crypto
  28. from . import shellpattern
  29. import msgpack
  30. import msgpack.fallback
  31. import socket
  32. # return codes returned by borg command
  33. # when borg is killed by signal N, rc = 128 + N
  34. EXIT_SUCCESS = 0 # everything done, no problems
  35. EXIT_WARNING = 1 # reached normal end of operation, but there were issues
  36. EXIT_ERROR = 2 # terminated abruptly, did not reach end of operation
class Error(Exception):
    """Error base class"""

    # NOTE: the class docstring doubles as the user-visible message template
    # (see get_message), so it must not be reworded casually.
    # if we raise such an Error and it is only catched by the uppermost
    # exception handler (that exits short after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        # format the docstring template with the exception's args
        return type(self).__doc__.format(*self.args)
# Concrete error types. Each docstring is the message template shown to the
# user via Error.get_message(), so the docstrings are part of the behavior.
class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    traceback = True


class IntegrityError(ErrorWithTraceback):
    """Data integrity error"""


class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""


class NoManifestError(Error):
    """Repository has no manifest."""


class PlaceholderError(Error):
    """Formatting Error: "{}".format({}): {}({})"""
  58. def check_extension_modules():
  59. from . import platform
  60. if hashindex.API_VERSION != 2:
  61. raise ExtensionModuleError
  62. if chunker.API_VERSION != 2:
  63. raise ExtensionModuleError
  64. if crypto.API_VERSION != 2:
  65. raise ExtensionModuleError
  66. if platform.API_VERSION != 2:
  67. raise ExtensionModuleError
class Manifest:
    """Repository manifest: maps archive names to archive metadata and holds repo config."""

    # the manifest is always stored under this fixed all-zero object id
    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository, item_keys=None):
        from .archive import ITEM_KEYS
        self.archives = {}     # archive name -> metadata dict
        self.config = {}
        self.key = key
        self.repository = repository
        # item_keys: set of keys valid in archive items; defaults to all known keys
        self.item_keys = frozenset(item_keys) if item_keys is not None else ITEM_KEYS

    @classmethod
    def load(cls, repository, key=None):
        """Fetch, decrypt and unpack the manifest from *repository*.

        If *key* is not given, detect it from the manifest ciphertext.
        Returns (manifest, key). Raises NoManifestError if the repository
        has no manifest object.
        """
        from .key import key_factory
        from .repository import Repository
        from .archive import ITEM_KEYS
        try:
            cdata = repository.get(cls.MANIFEST_ID)
        except Repository.ObjectNotFound:
            raise NoManifestError
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        # msgpack gives bytes keys; decode archive names to str
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        # valid item keys are whatever is known in the repo or every key we know
        manifest.item_keys = frozenset(m.get(b'item_keys', [])) | ITEM_KEYS
        return manifest, key

    def write(self):
        """Pack, encrypt and store the manifest; refreshes self.timestamp and self.id."""
        self.timestamp = datetime.utcnow().isoformat()
        # StableDict gives deterministic serialization
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
            'item_keys': tuple(self.item_keys),
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
  124. def prune_within(archives, within):
  125. multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
  126. try:
  127. hours = int(within[:-1]) * multiplier[within[-1]]
  128. except (KeyError, ValueError):
  129. # I don't like how this displays the original exception too:
  130. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  131. if hours <= 0:
  132. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  133. target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
  134. return [a for a in archives if a.ts > target]
  135. def prune_split(archives, pattern, n, skip=[]):
  136. last = None
  137. keep = []
  138. if n == 0:
  139. return keep
  140. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  141. period = to_localtime(a.ts).strftime(pattern)
  142. if period != last:
  143. last = period
  144. if a not in skip:
  145. keep.append(a)
  146. if len(keep) == n:
  147. break
  148. return keep
class Statistics:
    """Accumulates original/compressed/deduplicated sizes and file count for a backup run."""

    def __init__(self):
        # osize/csize/usize: original, compressed, unique(deduplicated) byte counts
        self.osize = self.csize = self.usize = self.nfiles = 0
        self.last_progress = 0  # timestamp when last progress was shown

    def update(self, size, csize, unique):
        # unique: True when this chunk is stored for the first time
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    # template used by __str__; column widths match the :>20s fields below
    summary = """\
                       Original size      Compressed size    Deduplicated size
{label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""

    def __str__(self):
        return self.summary.format(stats=self, label='This archive:')

    def __repr__(self):
        return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)

    @property
    def osize_fmt(self):
        # human readable original size
        return format_file_size(self.osize)

    @property
    def usize_fmt(self):
        # human readable deduplicated size
        return format_file_size(self.usize)

    @property
    def csize_fmt(self):
        # human readable compressed size
        return format_file_size(self.csize)

    def show_progress(self, item=None, final=False, stream=None, dt=None):
        """Print a one-line progress summary to *stream* (default: stderr).

        With dt set, output is rate-limited to one update per *dt* seconds.
        final=True blanks the progress line instead of printing stats.
        """
        now = time.time()
        if dt is None or now - self.last_progress > dt:
            self.last_progress = now
            columns, lines = get_terminal_size()
            if not final:
                msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
                path = remove_surrogates(item[b'path']) if item else ''
                space = columns - len(msg)
                if space < len('...') + len(path):
                    # truncate the middle of the path so the line fits the terminal
                    path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
                msg += "{0:<{space}}".format(path, space=space)
            else:
                msg = ' ' * columns
            # \r keeps rewriting the same terminal line
            print(msg, file=stream or sys.stderr, end="\r", flush=True)
  189. def get_keys_dir():
  190. """Determine where to repository keys and cache"""
  191. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
  192. keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
  193. if not os.path.exists(keys_dir):
  194. os.makedirs(keys_dir)
  195. os.chmod(keys_dir, stat.S_IRWXU)
  196. return keys_dir
  197. def get_cache_dir():
  198. """Determine where to repository keys and cache"""
  199. xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
  200. cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
  201. if not os.path.exists(cache_dir):
  202. os.makedirs(cache_dir)
  203. os.chmod(cache_dir, stat.S_IRWXU)
  204. with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
  205. fd.write(textwrap.dedent("""
  206. Signature: 8a477f597d28d172789f06886806bc55
  207. # This file is a cache directory tag created by Borg.
  208. # For information about cache directory tags, see:
  209. # http://www.brynosaurus.com/cachedir/
  210. """).lstrip())
  211. return cache_dir
  212. def to_localtime(ts):
  213. """Convert datetime object from UTC to local time zone"""
  214. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  215. def parse_timestamp(timestamp):
  216. """Parse a ISO 8601 timestamp string"""
  217. if '.' in timestamp: # microseconds might not be present
  218. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  219. else:
  220. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
  221. def load_excludes(fh):
  222. """Load and parse exclude patterns from file object. Lines empty or starting with '#' after stripping whitespace on
  223. both line ends are ignored.
  224. """
  225. patterns = (line for line in (i.strip() for i in fh) if not line.startswith('#'))
  226. return [parse_pattern(pattern) for pattern in patterns if pattern]
  227. def update_excludes(args):
  228. """Merge exclude patterns from files with those on command line."""
  229. if hasattr(args, 'exclude_files') and args.exclude_files:
  230. if not hasattr(args, 'excludes') or args.excludes is None:
  231. args.excludes = []
  232. for file in args.exclude_files:
  233. args.excludes += load_excludes(file)
  234. file.close()
  235. class PatternMatcher:
  236. def __init__(self, fallback=None):
  237. self._items = []
  238. # Value to return from match function when none of the patterns match.
  239. self.fallback = fallback
  240. def add(self, patterns, value):
  241. """Add list of patterns to internal list. The given value is returned from the match function when one of the
  242. given patterns matches.
  243. """
  244. self._items.extend((i, value) for i in patterns)
  245. def match(self, path):
  246. for (pattern, value) in self._items:
  247. if pattern.match(path):
  248. return value
  249. return self.fallback
  250. def normalized(func):
  251. """ Decorator for the Pattern match methods, returning a wrapper that
  252. normalizes OSX paths to match the normalized pattern on OSX, and
  253. returning the original method on other platforms"""
  254. @wraps(func)
  255. def normalize_wrapper(self, path):
  256. return func(self, unicodedata.normalize("NFD", path))
  257. if sys.platform in ('darwin',):
  258. # HFS+ converts paths to a canonical form, so users shouldn't be
  259. # required to enter an exact match
  260. return normalize_wrapper
  261. else:
  262. # Windows and Unix filesystems allow different forms, so users
  263. # always have to enter an exact match
  264. return func
  265. class PatternBase:
  266. """Shared logic for inclusion/exclusion patterns.
  267. """
  268. PREFIX = NotImplemented
  269. def __init__(self, pattern):
  270. self.pattern_orig = pattern
  271. self.match_count = 0
  272. if sys.platform in ('darwin',):
  273. pattern = unicodedata.normalize("NFD", pattern)
  274. self._prepare(pattern)
  275. @normalized
  276. def match(self, path):
  277. matches = self._match(path)
  278. if matches:
  279. self.match_count += 1
  280. return matches
  281. def __repr__(self):
  282. return '%s(%s)' % (type(self), self.pattern)
  283. def __str__(self):
  284. return self.pattern_orig
  285. def _prepare(self, pattern):
  286. raise NotImplementedError
  287. def _match(self, path):
  288. raise NotImplementedError
  289. # For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
  290. # or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
  291. # separator to the end of the path before matching.
  292. class PathPrefixPattern(PatternBase):
  293. """Literal files or directories listed on the command line
  294. for some operations (e.g. extract, but not create).
  295. If a directory is specified, all paths that start with that
  296. path match as well. A trailing slash makes no difference.
  297. """
  298. PREFIX = "pp"
  299. def _prepare(self, pattern):
  300. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep
  301. def _match(self, path):
  302. return (path + os.path.sep).startswith(self.pattern)
  303. class FnmatchPattern(PatternBase):
  304. """Shell glob patterns to exclude. A trailing slash means to
  305. exclude the contents of a directory, but not the directory itself.
  306. """
  307. PREFIX = "fm"
  308. def _prepare(self, pattern):
  309. if pattern.endswith(os.path.sep):
  310. pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
  311. else:
  312. pattern = os.path.normpath(pattern) + os.path.sep + '*'
  313. self.pattern = pattern
  314. # fnmatch and re.match both cache compiled regular expressions.
  315. # Nevertheless, this is about 10 times faster.
  316. self.regex = re.compile(translate(self.pattern))
  317. def _match(self, path):
  318. return (self.regex.match(path + os.path.sep) is not None)
  319. class ShellPattern(PatternBase):
  320. """Shell glob patterns to exclude. A trailing slash means to
  321. exclude the contents of a directory, but not the directory itself.
  322. """
  323. PREFIX = "sh"
  324. def _prepare(self, pattern):
  325. sep = os.path.sep
  326. if pattern.endswith(sep):
  327. pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
  328. else:
  329. pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"
  330. self.pattern = pattern
  331. self.regex = re.compile(shellpattern.translate(self.pattern))
  332. def _match(self, path):
  333. return (self.regex.match(path + os.path.sep) is not None)
  334. class RegexPattern(PatternBase):
  335. """Regular expression to exclude.
  336. """
  337. PREFIX = "re"
  338. def _prepare(self, pattern):
  339. self.pattern = pattern
  340. self.regex = re.compile(pattern)
  341. def _match(self, path):
  342. # Normalize path separators
  343. if os.path.sep != '/':
  344. path = path.replace(os.path.sep, '/')
  345. return (self.regex.search(path) is not None)
  346. _PATTERN_STYLES = set([
  347. FnmatchPattern,
  348. PathPrefixPattern,
  349. RegexPattern,
  350. ShellPattern,
  351. ])
  352. _PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)
  353. def parse_pattern(pattern, fallback=FnmatchPattern):
  354. """Read pattern from string and return an instance of the appropriate implementation class.
  355. """
  356. if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
  357. (style, pattern) = (pattern[:2], pattern[3:])
  358. cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
  359. if cls is None:
  360. raise ValueError("Unknown pattern style: {}".format(style))
  361. else:
  362. cls = fallback
  363. return cls(pattern)
  364. def timestamp(s):
  365. """Convert a --timestamp=s argument to a datetime object"""
  366. try:
  367. # is it pointing to a file / directory?
  368. ts = os.stat(s).st_mtime
  369. return datetime.utcfromtimestamp(ts)
  370. except OSError:
  371. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  372. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  373. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  374. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  375. '%Y-%m-%d', '%Y-%j',
  376. ):
  377. try:
  378. return datetime.strptime(s, format)
  379. except ValueError:
  380. continue
  381. raise ValueError
  382. def ChunkerParams(s):
  383. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  384. if int(chunk_max) > 23:
  385. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  386. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  387. def CompressionSpec(s):
  388. values = s.split(',')
  389. count = len(values)
  390. if count < 1:
  391. raise ValueError
  392. # --compression algo[,level]
  393. name = values[0]
  394. if name in ('none', 'lz4', ):
  395. return dict(name=name)
  396. if name in ('zlib', 'lzma', ):
  397. if count < 2:
  398. level = 6 # default compression level in py stdlib
  399. elif count == 2:
  400. level = int(values[1])
  401. if not 0 <= level <= 9:
  402. raise ValueError
  403. else:
  404. raise ValueError
  405. return dict(name=name, level=level)
  406. raise ValueError
def PrefixSpec(s):
    """argparse type for --prefix: expand placeholders like {hostname} in *s*."""
    return replace_placeholders(s)
  409. def dir_is_cachedir(path):
  410. """Determines whether the specified path is a cache directory (and
  411. therefore should potentially be excluded from the backup) according to
  412. the CACHEDIR.TAG protocol
  413. (http://www.brynosaurus.com/cachedir/spec.html).
  414. """
  415. tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
  416. tag_path = os.path.join(path, 'CACHEDIR.TAG')
  417. try:
  418. if os.path.exists(tag_path):
  419. with open(tag_path, 'rb') as tag_file:
  420. tag_data = tag_file.read(len(tag_contents))
  421. if tag_data == tag_contents:
  422. return True
  423. except OSError:
  424. pass
  425. return False
  426. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  427. """Determines whether the specified path is excluded by being a cache
  428. directory or containing user-specified tag files. Returns a list of the
  429. paths of the tag files (either CACHEDIR.TAG or the matching
  430. user-specified files).
  431. """
  432. tag_paths = []
  433. if exclude_caches and dir_is_cachedir(path):
  434. tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
  435. if exclude_if_present is not None:
  436. for tag in exclude_if_present:
  437. tag_path = os.path.join(path, tag)
  438. if os.path.isfile(tag_path):
  439. tag_paths.append(tag_path)
  440. return tag_paths
  441. def format_line(format, data):
  442. try:
  443. return format.format(**data)
  444. except Exception as e:
  445. raise PlaceholderError(format, data, e.__class__.__name__, str(e))
  446. def replace_placeholders(text):
  447. """Replace placeholders in text with their values."""
  448. current_time = datetime.now()
  449. data = {
  450. 'pid': os.getpid(),
  451. 'fqdn': socket.getfqdn(),
  452. 'hostname': socket.gethostname(),
  453. 'now': current_time.now(),
  454. 'utcnow': current_time.utcnow(),
  455. 'user': uid2user(os.getuid(), os.getuid()),
  456. 'borgversion': borg_version,
  457. }
  458. return format_line(text, data)
  459. def safe_timestamp(item_timestamp_ns):
  460. try:
  461. return datetime.fromtimestamp(bigint_to_int(item_timestamp_ns) / 1e9)
  462. except OverflowError:
  463. # likely a broken file time and datetime did not want to go beyond year 9999
  464. return datetime(9999, 12, 31, 23, 59, 59)
def format_time(t):
    """Format *t* as 'Www, YYYY-MM-DD HH:MM:SS'.

    NOTE(review): despite the original "ISO-8601" wording, the leading
    abbreviated weekday (%a) is not ISO-8601 and is locale-dependent.
    """
    return t.strftime('%a, %Y-%m-%d %H:%M:%S')
  469. def format_timedelta(td):
  470. """Format timedelta in a human friendly format
  471. """
  472. # Since td.total_seconds() requires python 2.7
  473. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  474. s = ts % 60
  475. m = int(ts / 60) % 60
  476. h = int(ts / 3600) % 24
  477. txt = '%.2f seconds' % s
  478. if m:
  479. txt = '%d minutes %s' % (m, txt)
  480. if h:
  481. txt = '%d hours %s' % (h, txt)
  482. if td.days:
  483. txt = '%d days %s' % (td.days, txt)
  484. return txt
def format_file_size(v, precision=2):
    """Format file size into a human friendly format
    """
    # decimal (power-of-1000) units with a space separator, e.g. "1.50 MB"
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision)
  489. def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2):
  490. for unit in units[:-1]:
  491. if abs(round(num, precision)) < power:
  492. if isinstance(num, int):
  493. return "{}{}{}{}".format(num, sep, unit, suffix)
  494. else:
  495. return "{:3.{}f}{}{}{}".format(num, precision, sep, unit, suffix)
  496. num /= float(power)
  497. return "{:.{}f}{}{}{}".format(num, precision, sep, units[-1], suffix)
def sizeof_fmt_iec(num, suffix='B', sep='', precision=2):
    """Binary (power-of-1024) size formatting: KiB, MiB, ..."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)


def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2):
    """Decimal (power-of-1000) size formatting: kB, MB, ..."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
def format_archive(archive):
    """One-line listing: archive name padded to 36 chars, then its local timestamp."""
    return '%-36s %s' % (archive.name, format_time(to_localtime(archive.ts)))
  504. def memoize(function):
  505. cache = {}
  506. def decorated_function(*args):
  507. try:
  508. return cache[args]
  509. except KeyError:
  510. val = function(*args)
  511. cache[args] = val
  512. return val
  513. return decorated_function
  514. class Buffer:
  515. """
  516. provide a thread-local buffer
  517. """
  518. def __init__(self, allocator, size=4096, limit=None):
  519. """
  520. Initialize the buffer: use allocator(size) call to allocate a buffer.
  521. Optionally, set the upper <limit> for the buffer size.
  522. """
  523. assert callable(allocator), 'must give alloc(size) function as first param'
  524. assert limit is None or size <= limit, 'initial size must be <= limit'
  525. self._thread_local = threading.local()
  526. self.allocator = allocator
  527. self.limit = limit
  528. self.resize(size, init=True)
  529. def __len__(self):
  530. return len(self._thread_local.buffer)
  531. def resize(self, size, init=False):
  532. """
  533. resize the buffer - to avoid frequent reallocation, we usually always grow (if needed).
  534. giving init=True it is possible to first-time initialize or shrink the buffer.
  535. if a buffer size beyond the limit is requested, raise ValueError.
  536. """
  537. size = int(size)
  538. if self.limit is not None and size > self.limit:
  539. raise ValueError('Requested buffer size %d is above the limit of %d.' % (size, self.limit))
  540. if init or len(self) < size:
  541. self._thread_local.buffer = self.allocator(size)
  542. def get(self, size=None, init=False):
  543. """
  544. return a buffer of at least the requested size (None: any current size).
  545. init=True can be given to trigger shrinking of the buffer to the given size.
  546. """
  547. if size is not None:
  548. self.resize(size, init)
  549. return self._thread_local.buffer
@memoize
def uid2user(uid, default=None):
    """Map a numeric uid to a user name; return *default* if there is no passwd entry."""
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return default


@memoize
def user2uid(user, default=None):
    """Map a user name to its uid; falsy *user* is passed through, unknown names give *default*."""
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return default


@memoize
def gid2group(gid, default=None):
    """Map a numeric gid to a group name; return *default* if there is no group entry."""
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return default


@memoize
def group2gid(group, default=None):
    """Map a group name to its gid; falsy *group* is passed through, unknown names give *default*."""
    try:
        return group and grp.getgrnam(group).gr_gid
    except KeyError:
        return default
  574. def posix_acl_use_stored_uid_gid(acl):
  575. """Replace the user/group field with the stored uid/gid
  576. """
  577. entries = []
  578. for entry in safe_decode(acl).split('\n'):
  579. if entry:
  580. fields = entry.split(':')
  581. if len(fields) == 4:
  582. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  583. else:
  584. entries.append(entry)
  585. return safe_encode('\n'.join(entries))
def safe_decode(s, coding='utf-8', errors='surrogateescape'):
    """decode bytes to str, with round-tripping "invalid" bytes"""
    # surrogateescape keeps undecodable bytes as lone surrogates
    return s.decode(coding, errors)


def safe_encode(s, coding='utf-8', errors='surrogateescape'):
    """encode str to bytes, with round-tripping "invalid" bytes"""
    # inverse of safe_decode: surrogates turn back into the original bytes
    return s.encode(coding, errors)
class Location:
    """Object representing a repository / archive location
    """
    proto = user = host = port = path = archive = None
    # borg mount's FUSE filesystem creates one level of directories from
    # the archive names. Thus, we must not accept "/" in archive names.
    # ssh://user@host:port/path::archive
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # file:///path::archive
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # user@host:path::archive (scp-style shorthand)
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # get the repo from BORG_RE env and the optional archive from param.
    # if the syntax requires giving REPOSITORY (see "borg mount"),
    # use "::" to let it use the env var.
    # if REPOSITORY argument is optional, it'll automatically use the env.
    env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')

    def __init__(self, text=''):
        self.orig = text
        if not self.parse(self.orig):
            raise ValueError

    def parse(self, text):
        """Parse *text* (after placeholder expansion); fall back to BORG_REPO for '::archive' forms."""
        text = replace_placeholders(text)
        valid = self._parse(text)
        if valid:
            return True
        m = self.env_re.match(text)
        if not m:
            return False
        repo = os.environ.get('BORG_REPO')
        if repo is None:
            return False
        valid = self._parse(repo)
        if not valid:
            return False
        # archive name (if any) comes from the original text, repo from env
        self.archive = m.group('archive')
        return True

    def _parse(self, text):
        """Try each URL syntax in turn; on match, populate the instance fields."""
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            # a host present means remote (ssh), otherwise a local file path
            self.proto = self.host and 'ssh' or 'file'
            return True
        return False

    def __str__(self):
        items = [
            'proto=%r' % self.proto,
            'user=%r' % self.user,
            'host=%r' % self.host,
            'port=%r' % self.port,
            'path=%r' % self.path,
            'archive=%r' % self.archive,
        ]
        return ', '.join(items)

    def to_key_filename(self):
        """Derive a key file name under get_keys_dir() from host and path."""
        name = re.sub('[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    def canonical_path(self):
        """Return the location as a canonical 'ssh://...' URL or a plain local path."""
        if self.proto == 'file':
            return self.path
        else:
            # map scp-style relative paths into the ssh:// URL form
            if self.path and self.path.startswith('~'):
                path = '/' + self.path
            elif self.path and not self.path.startswith('/'):
                path = '/~/' + self.path
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self.host,
                                           ':{}'.format(self.port) if self.port else '',
                                           path)
  686. def location_validator(archive=None):
  687. def validator(text):
  688. try:
  689. loc = Location(text)
  690. except ValueError:
  691. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
  692. if archive is True and not loc.archive:
  693. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  694. elif archive is False and loc.archive:
  695. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  696. return loc
  697. return validator
  698. def archivename_validator():
  699. def validator(text):
  700. if '/' in text or '::' in text or not text:
  701. raise argparse.ArgumentTypeError('Invalid repository name: "%s"' % text)
  702. return text
  703. return validator
  704. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  705. for key in keys:
  706. if isinstance(d.get(key), bytes):
  707. d[key] = d[key].decode(encoding, errors)
  708. return d
  709. def remove_surrogates(s, errors='replace'):
  710. """Replace surrogates generated by fsdecode with '?'
  711. """
  712. return s.encode('utf-8', errors).decode('utf-8')
  713. _safe_re = re.compile(r'^((\.\.)?/+)+')
  714. def make_path_safe(path):
  715. """Make path safe by making it relative and local
  716. """
  717. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    # classic double-fork daemonization: fork + setsid detaches from the
    # controlling terminal, the second fork ensures the surviving process
    # is not a session leader and can never re-acquire a tty
    pid = os.fork()
    if pid:
        os._exit(0)  # parent exits, first child continues
    os.setsid()  # become session leader, drop the controlling tty
    pid = os.fork()
    if pid:
        os._exit(0)  # session leader exits, grandchild carries on
    os.chdir('/')  # do not keep any directory busy / unmountable
    # detach stdin/stdout/stderr and point all three at /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  736. class StableDict(dict):
  737. """A dict subclass with stable items() ordering"""
  738. def items(self):
  739. return sorted(super().items())
  740. def bigint_to_int(mtime):
  741. """Convert bytearray to int
  742. """
  743. if isinstance(mtime, bytes):
  744. return int.from_bytes(mtime, 'little', signed=True)
  745. return mtime
  746. def int_to_bigint(value):
  747. """Convert integers larger than 64 bits to bytearray
  748. Smaller integers are left alone
  749. """
  750. if value.bit_length() > 63:
  751. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  752. return value
  753. def is_slow_msgpack():
  754. return msgpack.Packer is msgpack.fallback.Packer
  755. FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
  756. TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
  757. DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
  758. def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
  759. retry_msg=None, invalid_msg=None, env_msg='{} (from {})',
  760. falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
  761. default=False, retry=True, env_var_override=None, ofile=None, input=input):
  762. """Output <msg> (usually a question) and let user input an answer.
  763. Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
  764. If it didn't qualify and retry is False (no retries wanted), return the default [which
  765. defaults to False]. If retry is True let user retry answering until answer is qualified.
  766. If env_var_override is given and this var is present in the environment, do not ask
  767. the user, but just use the env var contents as answer as if it was typed in.
  768. Otherwise read input from stdin and proceed as normal.
  769. If EOF is received instead an input or an invalid input without retry possibility,
  770. return default.
  771. :param msg: introducing message to output on ofile, no \n is added [None]
  772. :param retry_msg: retry message to output on ofile, no \n is added [None]
  773. :param false_msg: message to output before returning False [None]
  774. :param true_msg: message to output before returning True [None]
  775. :param default_msg: message to output before returning a <default> [None]
  776. :param invalid_msg: message to output after a invalid answer was given [None]
  777. :param env_msg: message to output when using input from env_var_override ['{} (from {})'],
  778. needs to have 2 placeholders for answer and env var name
  779. :param falsish: sequence of answers qualifying as False
  780. :param truish: sequence of answers qualifying as True
  781. :param defaultish: sequence of answers qualifying as <default>
  782. :param default: default return value (defaultish answer was given or no-answer condition) [False]
  783. :param retry: if True and input is incorrect, retry. Otherwise return default. [True]
  784. :param env_var_override: environment variable name [None]
  785. :param ofile: output stream [sys.stderr]
  786. :param input: input function [input from builtins]
  787. :return: boolean answer value, True or False
  788. """
  789. # note: we do not assign sys.stderr as default above, so it is
  790. # really evaluated NOW, not at function definition time.
  791. if ofile is None:
  792. ofile = sys.stderr
  793. if default not in (True, False):
  794. raise ValueError("invalid default value, must be True or False")
  795. if msg:
  796. print(msg, file=ofile, end='', flush=True)
  797. while True:
  798. answer = None
  799. if env_var_override:
  800. answer = os.environ.get(env_var_override)
  801. if answer is not None and env_msg:
  802. print(env_msg.format(answer, env_var_override), file=ofile)
  803. if answer is None:
  804. try:
  805. answer = input()
  806. except EOFError:
  807. # avoid defaultish[0], defaultish could be empty
  808. answer = truish[0] if default else falsish[0]
  809. if answer in defaultish:
  810. if default_msg:
  811. print(default_msg, file=ofile)
  812. return default
  813. if answer in truish:
  814. if true_msg:
  815. print(true_msg, file=ofile)
  816. return True
  817. if answer in falsish:
  818. if false_msg:
  819. print(false_msg, file=ofile)
  820. return False
  821. # if we get here, the answer was invalid
  822. if invalid_msg:
  823. print(invalid_msg, file=ofile)
  824. if not retry:
  825. return default
  826. if retry_msg:
  827. print(retry_msg, file=ofile, end='', flush=True)
  828. # in case we used an environment variable and it gave an invalid answer, do not use it again:
  829. env_var_override = None
  830. class ProgressIndicatorPercent:
  831. def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=None):
  832. """
  833. Percentage-based progress indicator
  834. :param total: total amount of items
  835. :param step: step size in percent
  836. :param start: at which percent value to start
  837. :param same_line: if True, emit output always on same line
  838. :param msg: output message, must contain one %f placeholder for the percentage
  839. :param file: output file, default: sys.stderr
  840. """
  841. self.counter = 0 # 0 .. (total-1)
  842. self.total = total
  843. self.trigger_at = start # output next percentage value when reaching (at least) this
  844. self.step = step
  845. if file is None:
  846. file = sys.stderr
  847. self.file = file
  848. self.msg = msg
  849. self.same_line = same_line
  850. def progress(self, current=None):
  851. if current is not None:
  852. self.counter = current
  853. pct = self.counter * 100 / self.total
  854. self.counter += 1
  855. if pct >= self.trigger_at:
  856. self.trigger_at += self.step
  857. return pct
  858. def show(self, current=None):
  859. pct = self.progress(current)
  860. if pct is not None:
  861. return self.output(pct)
  862. def output(self, percent):
  863. print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n', flush=True)
  864. def finish(self):
  865. if self.same_line:
  866. print(" " * len(self.msg % 100.0), file=self.file, end='\r')
  867. class ProgressIndicatorEndless:
  868. def __init__(self, step=10, file=None):
  869. """
  870. Progress indicator (long row of dots)
  871. :param step: every Nth call, call the func
  872. :param file: output file, default: sys.stderr
  873. """
  874. self.counter = 0 # call counter
  875. self.triggered = 0 # increases 1 per trigger event
  876. self.step = step # trigger every <step> calls
  877. if file is None:
  878. file = sys.stderr
  879. self.file = file
  880. def progress(self):
  881. self.counter += 1
  882. trigger = self.counter % self.step == 0
  883. if trigger:
  884. self.triggered += 1
  885. return trigger
  886. def show(self):
  887. trigger = self.progress()
  888. if trigger:
  889. return self.output(self.triggered)
  890. def output(self, triggered):
  891. print('.', end='', file=self.file, flush=True)
  892. def finish(self):
  893. print(file=self.file)
  894. def sysinfo():
  895. info = []
  896. info.append('Platform: %s' % (' '.join(platform.uname()), ))
  897. if sys.platform.startswith('linux'):
  898. info.append('Linux: %s %s %s' % platform.linux_distribution())
  899. info.append('Borg: %s Python: %s %s' % (borg_version, platform.python_implementation(), platform.python_version()))
  900. info.append('PID: %d CWD: %s' % (os.getpid(), os.getcwd()))
  901. info.append('sys.argv: %r' % sys.argv)
  902. info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
  903. info.append('')
  904. return '\n'.join(info)
  905. def log_multi(*msgs, level=logging.INFO):
  906. """
  907. log multiple lines of text, each line by a separate logging call for cosmetic reasons
  908. each positional argument may be a single or multiple lines (separated by newlines) of text.
  909. """
  910. lines = []
  911. for msg in msgs:
  912. lines.extend(msg.splitlines())
  913. for line in lines:
  914. logger.log(level, line)
  915. class ErrorIgnoringTextIOWrapper(io.TextIOWrapper):
  916. def read(self, n):
  917. if not self.closed:
  918. try:
  919. return super().read(n)
  920. except BrokenPipeError:
  921. try:
  922. super().close()
  923. except OSError:
  924. pass
  925. return ''
  926. def write(self, s):
  927. if not self.closed:
  928. try:
  929. return super().write(s)
  930. except BrokenPipeError:
  931. try:
  932. super().close()
  933. except OSError:
  934. pass
  935. return len(s)