helpers.py 57 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785
  1. import argparse
  2. import contextlib
  3. import grp
  4. import hashlib
  5. import logging
  6. import io
  7. import os
  8. import os.path
  9. import platform
  10. import pwd
  11. import re
  12. import signal
  13. import socket
  14. import stat
  15. import sys
  16. import textwrap
  17. import threading
  18. import time
  19. import unicodedata
  20. import uuid
  21. from binascii import hexlify
  22. from collections import namedtuple, deque, abc
  23. from datetime import datetime, timezone, timedelta
  24. from fnmatch import translate
  25. from functools import wraps, partial, lru_cache
  26. from itertools import islice
  27. from operator import attrgetter
  28. from string import Formatter
  29. import msgpack
  30. import msgpack.fallback
  31. from .logger import create_logger
  32. logger = create_logger()
  33. from . import __version__ as borg_version
  34. from . import chunker
  35. from . import crypto
  36. from . import hashindex
  37. from . import shellpattern
  38. from .constants import * # NOQA
  39. # meta dict, data bytes
  40. _Chunk = namedtuple('_Chunk', 'meta data')
  41. def Chunk(data, **meta):
  42. return _Chunk(meta, data)
class Error(Exception):
    """Error base class"""

    # If we raise such an Error and it is only caught by the uppermost
    # exception handler (that exits shortly after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        # The class docstring doubles as the user-facing message template,
        # formatted with the exception's positional args.
        return type(self).__doc__.format(*self.args)
class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    traceback = True
class IntegrityError(ErrorWithTraceback):
    """Data integrity error"""
class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""
class NoManifestError(Error):
    """Repository has no manifest."""
class PlaceholderError(Error):
    """Formatting Error: "{}".format({}): {}({})"""
  64. def check_extension_modules():
  65. from . import platform, compress
  66. if hashindex.API_VERSION != 3:
  67. raise ExtensionModuleError
  68. if chunker.API_VERSION != 2:
  69. raise ExtensionModuleError
  70. if compress.API_VERSION != 2:
  71. raise ExtensionModuleError
  72. if crypto.API_VERSION != 3:
  73. raise ExtensionModuleError
  74. if platform.API_VERSION != 3:
  75. raise ExtensionModuleError
  76. ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
  77. class Archives(abc.MutableMapping):
  78. """
  79. Nice wrapper around the archives dict, making sure only valid types/values get in
  80. and we can deal with str keys (and it internally encodes to byte keys) and eiter
  81. str timestamps or datetime timestamps.
  82. """
  83. def __init__(self):
  84. # key: encoded archive name, value: dict(b'id': bytes_id, b'time': bytes_iso_ts)
  85. self._archives = {}
  86. def __len__(self):
  87. return len(self._archives)
  88. def __iter__(self):
  89. return iter(safe_decode(name) for name in self._archives)
  90. def __getitem__(self, name):
  91. assert isinstance(name, str)
  92. _name = safe_encode(name)
  93. values = self._archives.get(_name)
  94. if values is None:
  95. raise KeyError
  96. ts = parse_timestamp(values[b'time'].decode('utf-8'))
  97. return ArchiveInfo(name=name, id=values[b'id'], ts=ts)
  98. def __setitem__(self, name, info):
  99. assert isinstance(name, str)
  100. name = safe_encode(name)
  101. assert isinstance(info, tuple)
  102. id, ts = info
  103. assert isinstance(id, bytes)
  104. if isinstance(ts, datetime):
  105. ts = ts.replace(tzinfo=None).isoformat()
  106. assert isinstance(ts, str)
  107. ts = ts.encode()
  108. self._archives[name] = {b'id': id, b'time': ts}
  109. def __delitem__(self, name):
  110. assert isinstance(name, str)
  111. name = safe_encode(name)
  112. del self._archives[name]
  113. def list(self, sort_by=None, reverse=False, prefix=''):
  114. """
  115. Inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
  116. Returns list of borg.helpers.ArchiveInfo instances
  117. """
  118. archives = [x for x in self.values() if x.name.startswith(prefix)]
  119. if sort_by is not None:
  120. archives = sorted(archives, key=attrgetter(sort_by))
  121. if reverse:
  122. archives.reverse()
  123. return archives
  124. def set_raw_dict(self, d):
  125. """set the dict we get from the msgpack unpacker"""
  126. for k, v in d.items():
  127. assert isinstance(k, bytes)
  128. assert isinstance(v, dict) and b'id' in v and b'time' in v
  129. self._archives[k] = v
  130. def get_raw_dict(self):
  131. """get the dict we can give to the msgpack packer"""
  132. return self._archives
class Manifest:
    # The manifest is always stored under this well-known all-zero object id.
    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository, item_keys=None):
        self.archives = Archives()
        self.config = {}
        self.key = key
        self.repository = repository
        # valid item keys: caller-provided set, else the global default set
        self.item_keys = frozenset(item_keys) if item_keys is not None else ITEM_KEYS

    @property
    def id_str(self):
        # hex string form of the manifest's binary id
        return bin_to_hex(self.id)

    @classmethod
    def load(cls, repository, key=None):
        """Fetch, decrypt and parse the manifest; return (manifest, key)."""
        from .item import ManifestItem
        from .key import key_factory
        from .repository import Repository
        try:
            cdata = repository.get(cls.MANIFEST_ID)
        except Repository.ObjectNotFound:
            raise NoManifestError
        if not key:
            # no key given: detect the right key type from the encrypted data
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        _, data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = ManifestItem(internal_dict=msgpack.unpackb(data))
        if m.get('version') != 1:
            raise ValueError('Invalid manifest version')
        manifest.archives.set_raw_dict(m.archives)
        manifest.timestamp = m.get('timestamp')
        manifest.config = m.config
        # valid item keys are whatever is known in the repo or every key we know
        manifest.item_keys = ITEM_KEYS | frozenset(key.decode() for key in m.get('item_keys', []))
        return manifest, key

    def write(self):
        """Serialize, encrypt and store the manifest into the repository."""
        from .item import ManifestItem
        self.timestamp = datetime.utcnow().isoformat()
        manifest = ManifestItem(
            version=1,
            archives=self.archives.get_raw_dict(),
            timestamp=self.timestamp,
            config=self.config,
            item_keys=tuple(self.item_keys),
        )
        data = msgpack.packb(manifest.as_dict())
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(Chunk(data)))
  180. def prune_within(archives, within):
  181. multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
  182. try:
  183. hours = int(within[:-1]) * multiplier[within[-1]]
  184. except (KeyError, ValueError):
  185. # I don't like how this displays the original exception too:
  186. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  187. if hours <= 0:
  188. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  189. target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
  190. return [a for a in archives if a.ts > target]
  191. def prune_split(archives, pattern, n, skip=[]):
  192. last = None
  193. keep = []
  194. if n == 0:
  195. return keep
  196. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  197. period = to_localtime(a.ts).strftime(pattern)
  198. if period != last:
  199. last = period
  200. if a not in skip:
  201. keep.append(a)
  202. if len(keep) == n:
  203. break
  204. return keep
  205. def get_home_dir():
  206. """Get user's home directory while preferring a possibly set HOME
  207. environment variable
  208. """
  209. # os.path.expanduser() behaves differently for '~' and '~someuser' as
  210. # parameters: when called with an explicit username, the possibly set
  211. # environment variable HOME is no longer respected. So we have to check if
  212. # it is set and only expand the user's home directory if HOME is unset.
  213. if os.environ.get('HOME', ''):
  214. return os.environ.get('HOME')
  215. else:
  216. return os.path.expanduser('~%s' % os.environ.get('USER', ''))
  217. def get_keys_dir():
  218. """Determine where to repository keys and cache"""
  219. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(get_home_dir(), '.config'))
  220. keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
  221. if not os.path.exists(keys_dir):
  222. os.makedirs(keys_dir)
  223. os.chmod(keys_dir, stat.S_IRWXU)
  224. return keys_dir
  225. def get_nonces_dir():
  226. """Determine where to store the local nonce high watermark"""
  227. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(get_home_dir(), '.config'))
  228. nonces_dir = os.environ.get('BORG_NONCES_DIR', os.path.join(xdg_config, 'borg', 'key-nonces'))
  229. if not os.path.exists(nonces_dir):
  230. os.makedirs(nonces_dir)
  231. os.chmod(nonces_dir, stat.S_IRWXU)
  232. return nonces_dir
  233. def get_cache_dir():
  234. """Determine where to repository keys and cache"""
  235. xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(get_home_dir(), '.cache'))
  236. cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
  237. if not os.path.exists(cache_dir):
  238. os.makedirs(cache_dir)
  239. os.chmod(cache_dir, stat.S_IRWXU)
  240. with open(os.path.join(cache_dir, CACHE_TAG_NAME), 'wb') as fd:
  241. fd.write(CACHE_TAG_CONTENTS)
  242. fd.write(textwrap.dedent("""
  243. # This file is a cache directory tag created by Borg.
  244. # For information about cache directory tags, see:
  245. # http://www.brynosaurus.com/cachedir/
  246. """).encode('ascii'))
  247. return cache_dir
  248. def to_localtime(ts):
  249. """Convert datetime object from UTC to local time zone"""
  250. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  251. def parse_timestamp(timestamp):
  252. """Parse a ISO 8601 timestamp string"""
  253. if '.' in timestamp: # microseconds might not be present
  254. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  255. else:
  256. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
def load_excludes(fh):
    """Load and parse exclude patterns from file object. Lines empty or starting with '#' after stripping whitespace on
    both line ends are ignored.
    """
    # clean_lines() filters out blank/comment lines; each remaining line is one pattern
    return [parse_pattern(pattern) for pattern in clean_lines(fh)]
  262. def update_excludes(args):
  263. """Merge exclude patterns from files with those on command line."""
  264. if hasattr(args, 'exclude_files') and args.exclude_files:
  265. if not hasattr(args, 'excludes') or args.excludes is None:
  266. args.excludes = []
  267. for file in args.exclude_files:
  268. args.excludes += load_excludes(file)
  269. file.close()
  270. class PatternMatcher:
  271. def __init__(self, fallback=None):
  272. self._items = []
  273. # Value to return from match function when none of the patterns match.
  274. self.fallback = fallback
  275. def empty(self):
  276. return not len(self._items)
  277. def add(self, patterns, value):
  278. """Add list of patterns to internal list. The given value is returned from the match function when one of the
  279. given patterns matches.
  280. """
  281. self._items.extend((i, value) for i in patterns)
  282. def match(self, path):
  283. for (pattern, value) in self._items:
  284. if pattern.match(path):
  285. return value
  286. return self.fallback
  287. def normalized(func):
  288. """ Decorator for the Pattern match methods, returning a wrapper that
  289. normalizes OSX paths to match the normalized pattern on OSX, and
  290. returning the original method on other platforms"""
  291. @wraps(func)
  292. def normalize_wrapper(self, path):
  293. return func(self, unicodedata.normalize("NFD", path))
  294. if sys.platform in ('darwin',):
  295. # HFS+ converts paths to a canonical form, so users shouldn't be
  296. # required to enter an exact match
  297. return normalize_wrapper
  298. else:
  299. # Windows and Unix filesystems allow different forms, so users
  300. # always have to enter an exact match
  301. return func
  302. class PatternBase:
  303. """Shared logic for inclusion/exclusion patterns.
  304. """
  305. PREFIX = NotImplemented
  306. def __init__(self, pattern):
  307. self.pattern_orig = pattern
  308. self.match_count = 0
  309. if sys.platform in ('darwin',):
  310. pattern = unicodedata.normalize("NFD", pattern)
  311. self._prepare(pattern)
  312. @normalized
  313. def match(self, path):
  314. matches = self._match(path)
  315. if matches:
  316. self.match_count += 1
  317. return matches
  318. def __repr__(self):
  319. return '%s(%s)' % (type(self), self.pattern)
  320. def __str__(self):
  321. return self.pattern_orig
  322. def _prepare(self, pattern):
  323. raise NotImplementedError
  324. def _match(self, path):
  325. raise NotImplementedError
  326. # For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
  327. # or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
  328. # separator to the end of the path before matching.
  329. class PathPrefixPattern(PatternBase):
  330. """Literal files or directories listed on the command line
  331. for some operations (e.g. extract, but not create).
  332. If a directory is specified, all paths that start with that
  333. path match as well. A trailing slash makes no difference.
  334. """
  335. PREFIX = "pp"
  336. def _prepare(self, pattern):
  337. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep
  338. def _match(self, path):
  339. return (path + os.path.sep).startswith(self.pattern)
  340. class FnmatchPattern(PatternBase):
  341. """Shell glob patterns to exclude. A trailing slash means to
  342. exclude the contents of a directory, but not the directory itself.
  343. """
  344. PREFIX = "fm"
  345. def _prepare(self, pattern):
  346. if pattern.endswith(os.path.sep):
  347. pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
  348. else:
  349. pattern = os.path.normpath(pattern) + os.path.sep + '*'
  350. self.pattern = pattern
  351. # fnmatch and re.match both cache compiled regular expressions.
  352. # Nevertheless, this is about 10 times faster.
  353. self.regex = re.compile(translate(self.pattern))
  354. def _match(self, path):
  355. return (self.regex.match(path + os.path.sep) is not None)
  356. class ShellPattern(PatternBase):
  357. """Shell glob patterns to exclude. A trailing slash means to
  358. exclude the contents of a directory, but not the directory itself.
  359. """
  360. PREFIX = "sh"
  361. def _prepare(self, pattern):
  362. sep = os.path.sep
  363. if pattern.endswith(sep):
  364. pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
  365. else:
  366. pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"
  367. self.pattern = pattern
  368. self.regex = re.compile(shellpattern.translate(self.pattern))
  369. def _match(self, path):
  370. return (self.regex.match(path + os.path.sep) is not None)
  371. class RegexPattern(PatternBase):
  372. """Regular expression to exclude.
  373. """
  374. PREFIX = "re"
  375. def _prepare(self, pattern):
  376. self.pattern = pattern
  377. self.regex = re.compile(pattern)
  378. def _match(self, path):
  379. # Normalize path separators
  380. if os.path.sep != '/':
  381. path = path.replace(os.path.sep, '/')
  382. return (self.regex.search(path) is not None)
  383. _PATTERN_STYLES = set([
  384. FnmatchPattern,
  385. PathPrefixPattern,
  386. RegexPattern,
  387. ShellPattern,
  388. ])
  389. _PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)
  390. def parse_pattern(pattern, fallback=FnmatchPattern):
  391. """Read pattern from string and return an instance of the appropriate implementation class.
  392. """
  393. if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
  394. (style, pattern) = (pattern[:2], pattern[3:])
  395. cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
  396. if cls is None:
  397. raise ValueError("Unknown pattern style: {}".format(style))
  398. else:
  399. cls = fallback
  400. return cls(pattern)
  401. def timestamp(s):
  402. """Convert a --timestamp=s argument to a datetime object"""
  403. try:
  404. # is it pointing to a file / directory?
  405. ts = os.stat(s).st_mtime
  406. return datetime.utcfromtimestamp(ts)
  407. except OSError:
  408. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  409. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  410. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  411. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  412. '%Y-%m-%d', '%Y-%j',
  413. ):
  414. try:
  415. return datetime.strptime(s, format)
  416. except ValueError:
  417. continue
  418. raise ValueError
  419. def ChunkerParams(s):
  420. if s.strip().lower() == "default":
  421. return CHUNKER_PARAMS
  422. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  423. if int(chunk_max) > 23:
  424. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  425. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  426. def CompressionSpec(s):
  427. values = s.split(',')
  428. count = len(values)
  429. if count < 1:
  430. raise ValueError
  431. # --compression algo[,level]
  432. name = values[0]
  433. if name in ('none', 'lz4', ):
  434. return dict(name=name)
  435. if name in ('zlib', 'lzma', ):
  436. if count < 2:
  437. level = 6 # default compression level in py stdlib
  438. elif count == 2:
  439. level = int(values[1])
  440. if not 0 <= level <= 9:
  441. raise ValueError
  442. else:
  443. raise ValueError
  444. return dict(name=name, level=level)
  445. if name == 'auto':
  446. if 2 <= count <= 3:
  447. compression = ','.join(values[1:])
  448. else:
  449. raise ValueError
  450. return dict(name=name, spec=CompressionSpec(compression))
  451. raise ValueError
  452. def dir_is_cachedir(path):
  453. """Determines whether the specified path is a cache directory (and
  454. therefore should potentially be excluded from the backup) according to
  455. the CACHEDIR.TAG protocol
  456. (http://www.brynosaurus.com/cachedir/spec.html).
  457. """
  458. tag_path = os.path.join(path, CACHE_TAG_NAME)
  459. try:
  460. if os.path.exists(tag_path):
  461. with open(tag_path, 'rb') as tag_file:
  462. tag_data = tag_file.read(len(CACHE_TAG_CONTENTS))
  463. if tag_data == CACHE_TAG_CONTENTS:
  464. return True
  465. except OSError:
  466. pass
  467. return False
  468. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  469. """Determines whether the specified path is excluded by being a cache
  470. directory or containing user-specified tag files. Returns a list of the
  471. paths of the tag files (either CACHEDIR.TAG or the matching
  472. user-specified files).
  473. """
  474. tag_paths = []
  475. if exclude_caches and dir_is_cachedir(path):
  476. tag_paths.append(os.path.join(path, CACHE_TAG_NAME))
  477. if exclude_if_present is not None:
  478. for tag in exclude_if_present:
  479. tag_path = os.path.join(path, tag)
  480. if os.path.isfile(tag_path):
  481. tag_paths.append(tag_path)
  482. return tag_paths
  483. def partial_format(format, mapping):
  484. """
  485. Apply format.format_map(mapping) while preserving unknown keys
  486. Does not support attribute access, indexing and ![rsa] conversions
  487. """
  488. for key, value in mapping.items():
  489. key = re.escape(key)
  490. format = re.sub(r'(?<!\{)((\{%s\})|(\{%s:[^\}]*\}))' % (key, key),
  491. lambda match: match.group(1).format_map(mapping),
  492. format)
  493. return format
  494. class DatetimeWrapper:
  495. def __init__(self, dt):
  496. self.dt = dt
  497. def __format__(self, format_spec):
  498. if format_spec == '':
  499. format_spec = '%Y-%m-%dT%H:%M:%S'
  500. return self.dt.__format__(format_spec)
  501. def format_line(format, data):
  502. try:
  503. return format.format(**data)
  504. except Exception as e:
  505. raise PlaceholderError(format, data, e.__class__.__name__, str(e))
  506. def replace_placeholders(text):
  507. """Replace placeholders in text with their values."""
  508. current_time = datetime.now()
  509. data = {
  510. 'pid': os.getpid(),
  511. 'fqdn': socket.getfqdn(),
  512. 'hostname': socket.gethostname(),
  513. 'now': DatetimeWrapper(current_time.now()),
  514. 'utcnow': DatetimeWrapper(current_time.utcnow()),
  515. 'user': uid2user(os.getuid(), os.getuid()),
  516. 'uuid4': str(uuid.uuid4()),
  517. 'borgversion': borg_version,
  518. }
  519. return format_line(text, data)
  520. PrefixSpec = replace_placeholders
  521. HUMAN_SORT_KEYS = ['timestamp'] + list(ArchiveInfo._fields)
  522. HUMAN_SORT_KEYS.remove('ts')
  523. def SortBySpec(text):
  524. for token in text.split(','):
  525. if token not in HUMAN_SORT_KEYS:
  526. raise ValueError('Invalid sort key: %s' % token)
  527. return text.replace('timestamp', 'ts')
def safe_timestamp(item_timestamp_ns):
    """Convert a nanosecond bigint timestamp to a datetime, clamping on overflow."""
    try:
        return datetime.fromtimestamp(bigint_to_int(item_timestamp_ns) / 1e9)
    except OverflowError:
        # likely a broken file time and datetime did not want to go beyond year 9999
        return datetime(9999, 12, 31, 23, 59, 59)
  534. def format_time(t):
  535. """use ISO-8601 date and time format
  536. """
  537. return t.strftime('%a, %Y-%m-%d %H:%M:%S')
  538. def format_timedelta(td):
  539. """Format timedelta in a human friendly format
  540. """
  541. ts = td.total_seconds()
  542. s = ts % 60
  543. m = int(ts / 60) % 60
  544. h = int(ts / 3600) % 24
  545. txt = '%.2f seconds' % s
  546. if m:
  547. txt = '%d minutes %s' % (m, txt)
  548. if h:
  549. txt = '%d hours %s' % (h, txt)
  550. if td.days:
  551. txt = '%d days %s' % (td.days, txt)
  552. return txt
def format_file_size(v, precision=2, sign=False):
    """Format file size into a human friendly format
    """
    # decimal (powers of 1000) units with a space separator, e.g. "1.23 MB"
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision, sign=sign)
  557. def parse_file_size(s):
  558. """Return int from file size (1234, 55G, 1.7T)."""
  559. if not s:
  560. return int(s) # will raise
  561. suffix = s[-1]
  562. power = 1000
  563. try:
  564. factor = {
  565. 'K': power,
  566. 'M': power**2,
  567. 'G': power**3,
  568. 'T': power**4,
  569. 'P': power**5,
  570. }[suffix]
  571. s = s[:-1]
  572. except KeyError:
  573. factor = 1
  574. return int(float(s) * factor)
  575. def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2, sign=False):
  576. prefix = '+' if sign and num > 0 else ''
  577. for unit in units[:-1]:
  578. if abs(round(num, precision)) < power:
  579. if isinstance(num, int):
  580. return "{}{}{}{}{}".format(prefix, num, sep, unit, suffix)
  581. else:
  582. return "{}{:3.{}f}{}{}{}".format(prefix, num, precision, sep, unit, suffix)
  583. num /= float(power)
  584. return "{}{:.{}f}{}{}{}".format(prefix, num, precision, sep, units[-1], suffix)
def sizeof_fmt_iec(num, suffix='B', sep='', precision=2, sign=False):
    # binary (IEC) prefixes: Ki, Mi, ... scaled by powers of 1024
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
                      units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)
def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2, sign=False):
    # decimal (SI) prefixes: k, M, ... scaled by powers of 1000
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
                      units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
  591. def format_archive(archive):
  592. return '%-36s %s [%s]' % (
  593. archive.name,
  594. format_time(to_localtime(archive.ts)),
  595. bin_to_hex(archive.id),
  596. )
  597. class Buffer:
  598. """
  599. provide a thread-local buffer
  600. """
  601. def __init__(self, allocator, size=4096, limit=None):
  602. """
  603. Initialize the buffer: use allocator(size) call to allocate a buffer.
  604. Optionally, set the upper <limit> for the buffer size.
  605. """
  606. assert callable(allocator), 'must give alloc(size) function as first param'
  607. assert limit is None or size <= limit, 'initial size must be <= limit'
  608. self._thread_local = threading.local()
  609. self.allocator = allocator
  610. self.limit = limit
  611. self.resize(size, init=True)
  612. def __len__(self):
  613. return len(self._thread_local.buffer)
  614. def resize(self, size, init=False):
  615. """
  616. resize the buffer - to avoid frequent reallocation, we usually always grow (if needed).
  617. giving init=True it is possible to first-time initialize or shrink the buffer.
  618. if a buffer size beyond the limit is requested, raise ValueError.
  619. """
  620. size = int(size)
  621. if self.limit is not None and size > self.limit:
  622. raise ValueError('Requested buffer size %d is above the limit of %d.' % (size, self.limit))
  623. if init or len(self) < size:
  624. self._thread_local.buffer = self.allocator(size)
  625. def get(self, size=None, init=False):
  626. """
  627. return a buffer of at least the requested size (None: any current size).
  628. init=True can be given to trigger shrinking of the buffer to the given size.
  629. """
  630. if size is not None:
  631. self.resize(size, init)
  632. return self._thread_local.buffer
  633. @lru_cache(maxsize=None)
  634. def uid2user(uid, default=None):
  635. try:
  636. return pwd.getpwuid(uid).pw_name
  637. except KeyError:
  638. return default
  639. @lru_cache(maxsize=None)
  640. def user2uid(user, default=None):
  641. try:
  642. return user and pwd.getpwnam(user).pw_uid
  643. except KeyError:
  644. return default
  645. @lru_cache(maxsize=None)
  646. def gid2group(gid, default=None):
  647. try:
  648. return grp.getgrgid(gid).gr_name
  649. except KeyError:
  650. return default
  651. @lru_cache(maxsize=None)
  652. def group2gid(group, default=None):
  653. try:
  654. return group and grp.getgrnam(group).gr_gid
  655. except KeyError:
  656. return default
  657. def posix_acl_use_stored_uid_gid(acl):
  658. """Replace the user/group field with the stored uid/gid
  659. """
  660. entries = []
  661. for entry in safe_decode(acl).split('\n'):
  662. if entry:
  663. fields = entry.split(':')
  664. if len(fields) == 4:
  665. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  666. else:
  667. entries.append(entry)
  668. return safe_encode('\n'.join(entries))
  669. def safe_decode(s, coding='utf-8', errors='surrogateescape'):
  670. """decode bytes to str, with round-tripping "invalid" bytes"""
  671. if s is None:
  672. return None
  673. return s.decode(coding, errors)
  674. def safe_encode(s, coding='utf-8', errors='surrogateescape'):
  675. """encode str to bytes, with round-tripping "invalid" bytes"""
  676. if s is None:
  677. return None
  678. return s.encode(coding, errors)
  679. def bin_to_hex(binary):
  680. return hexlify(binary).decode('ascii')
  681. class Location:
  682. """Object representing a repository / archive location
  683. """
  684. proto = user = host = port = path = archive = None
  685. # borg mount's FUSE filesystem creates one level of directories from
  686. # the archive names. Thus, we must not accept "/" in archive names.
  687. ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
  688. r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
  689. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  690. file_re = re.compile(r'(?P<proto>file)://'
  691. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  692. scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
  693. r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
  694. # get the repo from BORG_RE env and the optional archive from param.
  695. # if the syntax requires giving REPOSITORY (see "borg mount"),
  696. # use "::" to let it use the env var.
  697. # if REPOSITORY argument is optional, it'll automatically use the env.
  698. env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')
  699. def __init__(self, text=''):
  700. self.orig = text
  701. if not self.parse(self.orig):
  702. raise ValueError
  703. def parse(self, text):
  704. text = replace_placeholders(text)
  705. valid = self._parse(text)
  706. if valid:
  707. return True
  708. m = self.env_re.match(text)
  709. if not m:
  710. return False
  711. repo = os.environ.get('BORG_REPO')
  712. if repo is None:
  713. return False
  714. valid = self._parse(repo)
  715. if not valid:
  716. return False
  717. self.archive = m.group('archive')
  718. return True
  719. def _parse(self, text):
  720. m = self.ssh_re.match(text)
  721. if m:
  722. self.proto = m.group('proto')
  723. self.user = m.group('user')
  724. self.host = m.group('host')
  725. self.port = m.group('port') and int(m.group('port')) or None
  726. self.path = os.path.normpath(m.group('path'))
  727. self.archive = m.group('archive')
  728. return True
  729. m = self.file_re.match(text)
  730. if m:
  731. self.proto = m.group('proto')
  732. self.path = os.path.normpath(m.group('path'))
  733. self.archive = m.group('archive')
  734. return True
  735. m = self.scp_re.match(text)
  736. if m:
  737. self.user = m.group('user')
  738. self.host = m.group('host')
  739. self.path = os.path.normpath(m.group('path'))
  740. self.archive = m.group('archive')
  741. self.proto = self.host and 'ssh' or 'file'
  742. return True
  743. return False
  744. def __str__(self):
  745. items = [
  746. 'proto=%r' % self.proto,
  747. 'user=%r' % self.user,
  748. 'host=%r' % self.host,
  749. 'port=%r' % self.port,
  750. 'path=%r' % self.path,
  751. 'archive=%r' % self.archive,
  752. ]
  753. return ', '.join(items)
  754. def to_key_filename(self):
  755. name = re.sub('[^\w]', '_', self.path).strip('_')
  756. if self.proto != 'file':
  757. name = self.host + '__' + name
  758. return os.path.join(get_keys_dir(), name)
  759. def __repr__(self):
  760. return "Location(%s)" % self
  761. def canonical_path(self):
  762. if self.proto == 'file':
  763. return self.path
  764. else:
  765. if self.path and self.path.startswith('~'):
  766. path = '/' + self.path
  767. elif self.path and not self.path.startswith('/'):
  768. path = '/~/' + self.path
  769. else:
  770. path = self.path
  771. return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
  772. self.host,
  773. ':{}'.format(self.port) if self.port else '',
  774. path)
  775. def location_validator(archive=None):
  776. def validator(text):
  777. try:
  778. loc = Location(text)
  779. except ValueError:
  780. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
  781. if archive is True and not loc.archive:
  782. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  783. elif archive is False and loc.archive:
  784. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  785. return loc
  786. return validator
  787. def archivename_validator():
  788. def validator(text):
  789. if '/' in text or '::' in text or not text:
  790. raise argparse.ArgumentTypeError('Invalid repository name: "%s"' % text)
  791. return text
  792. return validator
  793. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  794. for key in keys:
  795. if isinstance(d.get(key), bytes):
  796. d[key] = d[key].decode(encoding, errors)
  797. return d
  798. def remove_surrogates(s, errors='replace'):
  799. """Replace surrogates generated by fsdecode with '?'
  800. """
  801. return s.encode('utf-8', errors).decode('utf-8')
  802. _safe_re = re.compile(r'^((\.\.)?/+)+')
  803. def make_path_safe(path):
  804. """Make path safe by making it relative and local
  805. """
  806. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background

    Classic double-fork daemonization: the first fork returns control to the
    caller (parent exits), setsid() starts a new session, and the second fork
    gives up session leadership.  Afterwards, cwd is moved to / and the
    standard file descriptors are redirected to /dev/null.
    """
    pid = os.fork()
    if pid:
        # first parent: leave immediately, the child carries on
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        # second parent (session leader): exit, the grandchild carries on
        os._exit(0)
    # do not keep any directory in use (would block unmounting)
    os.chdir('/')
    # replace stdin/stdout/stderr (fds 0, 1, 2) with /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open(os.devnull, os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  825. class StableDict(dict):
  826. """A dict subclass with stable items() ordering"""
  827. def items(self):
  828. return sorted(super().items())
  829. def bigint_to_int(mtime):
  830. """Convert bytearray to int
  831. """
  832. if isinstance(mtime, bytes):
  833. return int.from_bytes(mtime, 'little', signed=True)
  834. return mtime
  835. def int_to_bigint(value):
  836. """Convert integers larger than 64 bits to bytearray
  837. Smaller integers are left alone
  838. """
  839. if value.bit_length() > 63:
  840. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  841. return value
def is_slow_msgpack():
    # msgpack.Packer is the C extension class when it could be imported;
    # if it is the pure-python fallback implementation, msgpack will be slow.
    return msgpack.Packer is msgpack.fallback.Packer
  844. FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
  845. TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
  846. DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
  847. def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
  848. retry_msg=None, invalid_msg=None, env_msg='{} (from {})',
  849. falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
  850. default=False, retry=True, env_var_override=None, ofile=None, input=input):
  851. """Output <msg> (usually a question) and let user input an answer.
  852. Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
  853. If it didn't qualify and retry is False (no retries wanted), return the default [which
  854. defaults to False]. If retry is True let user retry answering until answer is qualified.
  855. If env_var_override is given and this var is present in the environment, do not ask
  856. the user, but just use the env var contents as answer as if it was typed in.
  857. Otherwise read input from stdin and proceed as normal.
  858. If EOF is received instead an input or an invalid input without retry possibility,
  859. return default.
  860. :param msg: introducing message to output on ofile, no \n is added [None]
  861. :param retry_msg: retry message to output on ofile, no \n is added [None]
  862. :param false_msg: message to output before returning False [None]
  863. :param true_msg: message to output before returning True [None]
  864. :param default_msg: message to output before returning a <default> [None]
  865. :param invalid_msg: message to output after a invalid answer was given [None]
  866. :param env_msg: message to output when using input from env_var_override ['{} (from {})'],
  867. needs to have 2 placeholders for answer and env var name
  868. :param falsish: sequence of answers qualifying as False
  869. :param truish: sequence of answers qualifying as True
  870. :param defaultish: sequence of answers qualifying as <default>
  871. :param default: default return value (defaultish answer was given or no-answer condition) [False]
  872. :param retry: if True and input is incorrect, retry. Otherwise return default. [True]
  873. :param env_var_override: environment variable name [None]
  874. :param ofile: output stream [sys.stderr]
  875. :param input: input function [input from builtins]
  876. :return: boolean answer value, True or False
  877. """
  878. # note: we do not assign sys.stderr as default above, so it is
  879. # really evaluated NOW, not at function definition time.
  880. if ofile is None:
  881. ofile = sys.stderr
  882. if default not in (True, False):
  883. raise ValueError("invalid default value, must be True or False")
  884. if msg:
  885. print(msg, file=ofile, end='', flush=True)
  886. while True:
  887. answer = None
  888. if env_var_override:
  889. answer = os.environ.get(env_var_override)
  890. if answer is not None and env_msg:
  891. print(env_msg.format(answer, env_var_override), file=ofile)
  892. if answer is None:
  893. try:
  894. answer = input()
  895. except EOFError:
  896. # avoid defaultish[0], defaultish could be empty
  897. answer = truish[0] if default else falsish[0]
  898. if answer in defaultish:
  899. if default_msg:
  900. print(default_msg, file=ofile)
  901. return default
  902. if answer in truish:
  903. if true_msg:
  904. print(true_msg, file=ofile)
  905. return True
  906. if answer in falsish:
  907. if false_msg:
  908. print(false_msg, file=ofile)
  909. return False
  910. # if we get here, the answer was invalid
  911. if invalid_msg:
  912. print(invalid_msg, file=ofile)
  913. if not retry:
  914. return default
  915. if retry_msg:
  916. print(retry_msg, file=ofile, end='', flush=True)
  917. # in case we used an environment variable and it gave an invalid answer, do not use it again:
  918. env_var_override = None
  919. class ProgressIndicatorPercent:
  920. LOGGER = 'borg.output.progress'
  921. def __init__(self, total=0, step=5, start=0, msg="%3.0f%%"):
  922. """
  923. Percentage-based progress indicator
  924. :param total: total amount of items
  925. :param step: step size in percent
  926. :param start: at which percent value to start
  927. :param msg: output message, must contain one %f placeholder for the percentage
  928. """
  929. self.counter = 0 # 0 .. (total-1)
  930. self.total = total
  931. self.trigger_at = start # output next percentage value when reaching (at least) this
  932. self.step = step
  933. self.msg = msg
  934. self.output_len = len(self.msg % 100.0)
  935. self.handler = None
  936. self.logger = logging.getLogger(self.LOGGER)
  937. # If there are no handlers, set one up explicitly because the
  938. # terminator and propagation needs to be set. If there are,
  939. # they must have been set up by BORG_LOGGING_CONF: skip setup.
  940. if not self.logger.handlers:
  941. self.handler = logging.StreamHandler(stream=sys.stderr)
  942. self.handler.setLevel(logging.INFO)
  943. self.handler.terminator = '\r'
  944. self.logger.addHandler(self.handler)
  945. if self.logger.level == logging.NOTSET:
  946. self.logger.setLevel(logging.WARN)
  947. self.logger.propagate = False
  948. def __del__(self):
  949. if self.handler is not None:
  950. self.logger.removeHandler(self.handler)
  951. self.handler.close()
  952. def progress(self, current=None, increase=1):
  953. if current is not None:
  954. self.counter = current
  955. pct = self.counter * 100 / self.total
  956. self.counter += increase
  957. if pct >= self.trigger_at:
  958. self.trigger_at += self.step
  959. return pct
  960. def show(self, current=None, increase=1):
  961. pct = self.progress(current, increase)
  962. if pct is not None:
  963. return self.output(self.msg % pct)
  964. def output(self, message):
  965. self.output_len = max(len(message), self.output_len)
  966. message = message.ljust(self.output_len)
  967. self.logger.info(message)
  968. def finish(self):
  969. self.output('')
  970. class ProgressIndicatorEndless:
  971. def __init__(self, step=10, file=None):
  972. """
  973. Progress indicator (long row of dots)
  974. :param step: every Nth call, call the func
  975. :param file: output file, default: sys.stderr
  976. """
  977. self.counter = 0 # call counter
  978. self.triggered = 0 # increases 1 per trigger event
  979. self.step = step # trigger every <step> calls
  980. if file is None:
  981. file = sys.stderr
  982. self.file = file
  983. def progress(self):
  984. self.counter += 1
  985. trigger = self.counter % self.step == 0
  986. if trigger:
  987. self.triggered += 1
  988. return trigger
  989. def show(self):
  990. trigger = self.progress()
  991. if trigger:
  992. return self.output(self.triggered)
  993. def output(self, triggered):
  994. print('.', end='', file=self.file, flush=True)
  995. def finish(self):
  996. print(file=self.file)
  997. def sysinfo():
  998. info = []
  999. info.append('Platform: %s' % (' '.join(platform.uname()), ))
  1000. if sys.platform.startswith('linux'):
  1001. info.append('Linux: %s %s %s' % platform.linux_distribution())
  1002. info.append('Borg: %s Python: %s %s' % (borg_version, platform.python_implementation(), platform.python_version()))
  1003. info.append('PID: %d CWD: %s' % (os.getpid(), os.getcwd()))
  1004. info.append('sys.argv: %r' % sys.argv)
  1005. info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
  1006. info.append('')
  1007. return '\n'.join(info)
  1008. def log_multi(*msgs, level=logging.INFO, logger=logger):
  1009. """
  1010. log multiple lines of text, each line by a separate logging call for cosmetic reasons
  1011. each positional argument may be a single or multiple lines (separated by newlines) of text.
  1012. """
  1013. lines = []
  1014. for msg in msgs:
  1015. lines.extend(msg.splitlines())
  1016. for line in lines:
  1017. logger.log(level, line)
  1018. class BaseFormatter:
  1019. FIXED_KEYS = {
  1020. # Formatting aids
  1021. 'LF': '\n',
  1022. 'SPACE': ' ',
  1023. 'TAB': '\t',
  1024. 'CR': '\r',
  1025. 'NUL': '\0',
  1026. 'NEWLINE': os.linesep,
  1027. 'NL': os.linesep,
  1028. }
  1029. def get_item_data(self, item):
  1030. raise NotImplementedError
  1031. def format_item(self, item):
  1032. return self.format.format_map(self.get_item_data(item))
  1033. @staticmethod
  1034. def keys_help():
  1035. return " - NEWLINE: OS dependent line separator\n" \
  1036. " - NL: alias of NEWLINE\n" \
  1037. " - NUL: NUL character for creating print0 / xargs -0 like output, see barchive/bpath\n" \
  1038. " - SPACE\n" \
  1039. " - TAB\n" \
  1040. " - CR\n" \
  1041. " - LF"
  1042. class ArchiveFormatter(BaseFormatter):
  1043. def __init__(self, format):
  1044. self.format = partial_format(format, self.FIXED_KEYS)
  1045. def get_item_data(self, archive):
  1046. return {
  1047. 'barchive': archive.name,
  1048. 'archive': remove_surrogates(archive.name),
  1049. 'id': bin_to_hex(archive.id),
  1050. 'time': format_time(to_localtime(archive.ts)),
  1051. }
  1052. @staticmethod
  1053. def keys_help():
  1054. return " - archive: archive name interpreted as text (might be missing non-text characters, see barchive)\n" \
  1055. " - barchive: verbatim archive name, can contain any character except NUL\n" \
  1056. " - time: time of creation of the archive\n" \
  1057. " - id: internal ID of the archive"
class ItemFormatter(BaseFormatter):
    """Formatter for archive items (files/dirs/links), used e.g. by "borg list"."""

    # help text snippets for keys that need explanation
    KEY_DESCRIPTIONS = {
        'bpath': 'verbatim POSIX path, can contain any character except NUL',
        'path': 'path interpreted as text (might be missing non-text characters, see bpath)',
        'source': 'link target for links (identical to linktarget)',
        'extra': 'prepends {source} with " -> " for soft links and " link to " for hard links',
        'csize': 'compressed size',
        'num_chunks': 'number of chunks in this file',
        'unique_chunks': 'number of unique chunks in this file',
    }
    # groups in which the keys are presented by keys_help()
    KEY_GROUPS = (
        ('type', 'mode', 'uid', 'gid', 'user', 'group', 'path', 'bpath', 'source', 'linktarget', 'flags'),
        ('size', 'csize', 'num_chunks', 'unique_chunks'),
        ('mtime', 'ctime', 'atime', 'isomtime', 'isoctime', 'isoatime'),
        tuple(sorted(hashlib.algorithms_guaranteed)),
        ('archiveid', 'archivename', 'extra'),
    )

    @classmethod
    def available_keys(cls):
        """Return the list of all supported format keys (derived from a dummy item)."""
        class FakeArchive:
            fpr = name = ""

        from .item import Item
        fake_item = Item(mode=0, path='', user='', group='', mtime=0, uid=0, gid=0)
        formatter = cls(FakeArchive, "")
        keys = []
        keys.extend(formatter.call_keys.keys())
        keys.extend(formatter.get_item_data(fake_item).keys())
        return keys

    @classmethod
    def keys_help(cls):
        """Return a help text listing all keys, grouped by KEY_GROUPS."""
        help = []
        keys = cls.available_keys()
        # FIXED_KEYS are documented by BaseFormatter.keys_help(), skip them here
        for key in cls.FIXED_KEYS:
            keys.remove(key)

        for group in cls.KEY_GROUPS:
            for key in group:
                # keys.remove also asserts (below) that every key belongs to some group
                keys.remove(key)
                text = " - " + key
                if key in cls.KEY_DESCRIPTIONS:
                    text += ": " + cls.KEY_DESCRIPTIONS[key]
                help.append(text)
            help.append("")
        assert not keys, str(keys)
        return "\n".join(help)

    def __init__(self, archive, format):
        """
        :param archive: archive the items come from (supplies name/fpr, cache, pipeline)
        :param format: user-given format string with {key} placeholders
        """
        self.archive = archive
        # keys that are the same for all items of this archive
        static_keys = {
            'archivename': archive.name,
            'archiveid': archive.fpr,
        }
        static_keys.update(self.FIXED_KEYS)
        self.format = partial_format(format, static_keys)
        # placeholder names actually used in the format string
        self.format_keys = {f[1] for f in Formatter().parse(format)}
        # keys that are computed per item by calling a function with the item
        self.call_keys = {
            'size': self.calculate_size,
            'csize': self.calculate_csize,
            'num_chunks': self.calculate_num_chunks,
            'unique_chunks': self.calculate_unique_chunks,
            'isomtime': partial(self.format_time, 'mtime'),
            'isoctime': partial(self.format_time, 'ctime'),
            'isoatime': partial(self.format_time, 'atime'),
            'mtime': partial(self.time, 'mtime'),
            'ctime': partial(self.time, 'ctime'),
            'atime': partial(self.time, 'atime'),
        }
        for hash_function in hashlib.algorithms_guaranteed:
            self.add_key(hash_function, partial(self.hash_item, hash_function))
        # only evaluate the expensive call_keys that the format string really uses
        self.used_call_keys = set(self.call_keys) & self.format_keys
        self.item_data = static_keys

    def add_key(self, key, callable_with_item):
        """Register an additional computed key and refresh the used-keys set."""
        self.call_keys[key] = callable_with_item
        self.used_call_keys = set(self.call_keys) & self.format_keys

    def get_item_data(self, item):
        """Return the dict of format key values for one archive *item*."""
        mode = stat.filemode(item.mode)
        item_type = mode[0]
        # note: item_data is reused (and overwritten) for every item
        item_data = self.item_data
        source = item.get('source', '')
        extra = ''
        if source:
            source = remove_surrogates(source)
            if item_type == 'l':
                extra = ' -> %s' % source
            else:
                # hardlink: show 'h' instead of the original type letter
                mode = 'h' + mode[1:]
                extra = ' link to %s' % source
        item_data['type'] = item_type
        item_data['mode'] = mode
        item_data['user'] = item.user or item.uid
        item_data['group'] = item.group or item.gid
        item_data['uid'] = item.uid
        item_data['gid'] = item.gid
        item_data['path'] = remove_surrogates(item.path)
        item_data['bpath'] = item.path
        item_data['source'] = source
        item_data['linktarget'] = source
        item_data['extra'] = extra
        item_data['flags'] = item.get('bsdflags')
        for key in self.used_call_keys:
            item_data[key] = self.call_keys[key](item)
        return item_data

    def calculate_num_chunks(self, item):
        # number of chunks this item's content is split into
        return len(item.get('chunks', []))

    def calculate_unique_chunks(self, item):
        # chunks with refcount 1 only occur in this item
        chunk_index = self.archive.cache.chunks
        return sum(1 for c in item.get('chunks', []) if chunk_index[c.id].refcount == 1)

    def calculate_size(self, item):
        # sum of the uncompressed chunk sizes
        return sum(c.size for c in item.get('chunks', []))

    def calculate_csize(self, item):
        # sum of the compressed chunk sizes
        return sum(c.csize for c in item.get('chunks', []))

    def hash_item(self, hash_function, item):
        """Return the hex digest of the item's content, '' for chunk-less items."""
        if 'chunks' not in item:
            return ""
        hash = hashlib.new(hash_function)
        for _, data in self.archive.pipeline.fetch_many([c.id for c in item.chunks]):
            hash.update(data)
        return hash.hexdigest()

    def format_time(self, key, item):
        # formatted timestamp; falls back to mtime if the requested one is unset
        return format_time(safe_timestamp(item.get(key) or item.mtime))

    def time(self, key, item):
        # raw timestamp; falls back to mtime if the requested one is unset
        return safe_timestamp(item.get(key) or item.mtime)
  1178. class ChunkIteratorFileWrapper:
  1179. """File-like wrapper for chunk iterators"""
  1180. def __init__(self, chunk_iterator):
  1181. self.chunk_iterator = chunk_iterator
  1182. self.chunk_offset = 0
  1183. self.chunk = b''
  1184. self.exhausted = False
  1185. def _refill(self):
  1186. remaining = len(self.chunk) - self.chunk_offset
  1187. if not remaining:
  1188. try:
  1189. chunk = next(self.chunk_iterator)
  1190. self.chunk = memoryview(chunk.data)
  1191. except StopIteration:
  1192. self.exhausted = True
  1193. return 0 # EOF
  1194. self.chunk_offset = 0
  1195. remaining = len(self.chunk)
  1196. return remaining
  1197. def _read(self, nbytes):
  1198. if not nbytes:
  1199. return b''
  1200. remaining = self._refill()
  1201. will_read = min(remaining, nbytes)
  1202. self.chunk_offset += will_read
  1203. return self.chunk[self.chunk_offset - will_read:self.chunk_offset]
  1204. def read(self, nbytes):
  1205. parts = []
  1206. while nbytes and not self.exhausted:
  1207. read_data = self._read(nbytes)
  1208. nbytes -= len(read_data)
  1209. parts.append(read_data)
  1210. return b''.join(parts)
  1211. def open_item(archive, item):
  1212. """Return file-like object for archived item (with chunks)."""
  1213. chunk_iterator = archive.pipeline.fetch_many([c.id for c in item.chunks])
  1214. return ChunkIteratorFileWrapper(chunk_iterator)
  1215. def file_status(mode):
  1216. if stat.S_ISREG(mode):
  1217. return 'A'
  1218. elif stat.S_ISDIR(mode):
  1219. return 'd'
  1220. elif stat.S_ISBLK(mode):
  1221. return 'b'
  1222. elif stat.S_ISCHR(mode):
  1223. return 'c'
  1224. elif stat.S_ISLNK(mode):
  1225. return 's'
  1226. elif stat.S_ISFIFO(mode):
  1227. return 'f'
  1228. return '?'
  1229. def chunkit(it, size):
  1230. """
  1231. Chunk an iterator <it> into pieces of <size>.
  1232. >>> list(chunker('ABCDEFG', 3))
  1233. [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
  1234. """
  1235. iterable = iter(it)
  1236. return iter(lambda: list(islice(iterable, size)), [])
  1237. def consume(iterator, n=None):
  1238. """Advance the iterator n-steps ahead. If n is none, consume entirely."""
  1239. # Use functions that consume iterators at C speed.
  1240. if n is None:
  1241. # feed the entire iterator into a zero-length deque
  1242. deque(iterator, maxlen=0)
  1243. else:
  1244. # advance to the empty slice starting at position n
  1245. next(islice(iterator, n, n), None)
  1246. # GenericDirEntry, scandir_generic (c) 2012 Ben Hoyt
  1247. # from the python-scandir package (3-clause BSD license, just like us, so no troubles here)
  1248. # note: simplified version
  1249. class GenericDirEntry:
  1250. __slots__ = ('name', '_scandir_path', '_path')
  1251. def __init__(self, scandir_path, name):
  1252. self._scandir_path = scandir_path
  1253. self.name = name
  1254. self._path = None
  1255. @property
  1256. def path(self):
  1257. if self._path is None:
  1258. self._path = os.path.join(self._scandir_path, self.name)
  1259. return self._path
  1260. def stat(self, follow_symlinks=True):
  1261. assert not follow_symlinks
  1262. return os.lstat(self.path)
  1263. def _check_type(self, type):
  1264. st = self.stat(False)
  1265. return stat.S_IFMT(st.st_mode) == type
  1266. def is_dir(self, follow_symlinks=True):
  1267. assert not follow_symlinks
  1268. return self._check_type(stat.S_IFDIR)
  1269. def is_file(self, follow_symlinks=True):
  1270. assert not follow_symlinks
  1271. return self._check_type(stat.S_IFREG)
  1272. def is_symlink(self):
  1273. return self._check_type(stat.S_IFLNK)
  1274. def inode(self):
  1275. st = self.stat(False)
  1276. return st.st_ino
  1277. def __repr__(self):
  1278. return '<{0}: {1!r}>'.format(self.__class__.__name__, self.path)
  1279. def scandir_generic(path='.'):
  1280. """Like os.listdir(), but yield DirEntry objects instead of returning a list of names."""
  1281. for name in sorted(os.listdir(path)):
  1282. yield GenericDirEntry(path, name)
  1283. try:
  1284. from os import scandir
  1285. except ImportError:
  1286. try:
  1287. # Try python-scandir on Python 3.4
  1288. from scandir import scandir
  1289. except ImportError:
  1290. # If python-scandir is not installed, then use a version that is just as slow as listdir.
  1291. scandir = scandir_generic
  1292. def scandir_inorder(path='.'):
  1293. return sorted(scandir(path), key=lambda dirent: dirent.inode())
  1294. def clean_lines(lines, lstrip=None, rstrip=None, remove_empty=True, remove_comments=True):
  1295. """
  1296. clean lines (usually read from a config file):
  1297. 1. strip whitespace (left and right), 2. remove empty lines, 3. remove comments.
  1298. note: only "pure comment lines" are supported, no support for "trailing comments".
  1299. :param lines: input line iterator (e.g. list or open text file) that gives unclean input lines
  1300. :param lstrip: lstrip call arguments or False, if lstripping is not desired
  1301. :param rstrip: rstrip call arguments or False, if rstripping is not desired
  1302. :param remove_comments: remove comment lines (lines starting with "#")
  1303. :param remove_empty: remove empty lines
  1304. :return: yields processed lines
  1305. """
  1306. for line in lines:
  1307. if lstrip is not False:
  1308. line = line.lstrip(lstrip)
  1309. if rstrip is not False:
  1310. line = line.rstrip(rstrip)
  1311. if remove_empty and not line:
  1312. continue
  1313. if remove_comments and line.startswith('#'):
  1314. continue
  1315. yield line
  1316. class CompressionDecider1:
  1317. def __init__(self, compression, compression_files):
  1318. """
  1319. Initialize a CompressionDecider instance (and read config files, if needed).
  1320. :param compression: default CompressionSpec (e.g. from --compression option)
  1321. :param compression_files: list of compression config files (e.g. from --compression-from) or
  1322. a list of other line iterators
  1323. """
  1324. self.compression = compression
  1325. if not compression_files:
  1326. self.matcher = None
  1327. else:
  1328. self.matcher = PatternMatcher(fallback=compression)
  1329. for file in compression_files:
  1330. try:
  1331. for line in clean_lines(file):
  1332. try:
  1333. compr_spec, fn_pattern = line.split(':', 1)
  1334. except:
  1335. continue
  1336. self.matcher.add([parse_pattern(fn_pattern)], CompressionSpec(compr_spec))
  1337. finally:
  1338. if hasattr(file, 'close'):
  1339. file.close()
  1340. def decide(self, path):
  1341. if self.matcher is not None:
  1342. return self.matcher.match(path)
  1343. return self.compression
class CompressionDecider2:
    # debug logger used to trace per-chunk compression decisions
    logger = create_logger('borg.debug.file-compression')

    def __init__(self, compression):
        # default compression spec, used when chunk metadata does not override it
        self.compression = compression

    def decide(self, chunk):
        """Return (compr_spec, chunk); resolves 'auto' specs via the lz4 heuristic."""
        # nothing fancy here yet: we either use what the metadata says or the default
        # later, we can decide based on the chunk data also.
        # if we compress the data here to decide, we can even update the chunk data
        # and modify the metadata as desired.
        compr_spec = chunk.meta.get('compress', self.compression)
        if compr_spec['name'] == 'auto':
            # we did not decide yet, use heuristic:
            compr_spec, chunk = self.heuristic_lz4(compr_spec, chunk)
        return compr_spec, chunk

    def heuristic_lz4(self, compr_args, chunk):
        """Trial-compress with lz4: keep the configured spec if data shrinks, else use 'none'."""
        from .compress import get_compressor
        meta, data = chunk
        lz4 = get_compressor('lz4')
        cdata = lz4.compress(data)
        data_len = len(data)
        cdata_len = len(cdata)
        if cdata_len < data_len:
            compr_spec = compr_args['spec']
        else:
            # uncompressible - we could have a special "uncompressible compressor"
            # that marks such data as uncompressible via compression-type metadata.
            compr_spec = CompressionSpec('none')
        compr_args.update(compr_spec)
        self.logger.debug("len(data) == %d, len(lz4(data)) == %d, choosing %s", data_len, cdata_len, compr_spec)
        return compr_args, Chunk(data, **meta)
  1374. class ErrorIgnoringTextIOWrapper(io.TextIOWrapper):
  1375. def read(self, n):
  1376. if not self.closed:
  1377. try:
  1378. return super().read(n)
  1379. except BrokenPipeError:
  1380. try:
  1381. super().close()
  1382. except OSError:
  1383. pass
  1384. return ''
  1385. def write(self, s):
  1386. if not self.closed:
  1387. try:
  1388. return super().write(s)
  1389. except BrokenPipeError:
  1390. try:
  1391. super().close()
  1392. except OSError:
  1393. pass
  1394. return len(s)
# deliberately derived from BaseException (not Exception), so a generic
# "except Exception" does not swallow these
class SignalException(BaseException):
    """base class for all signal-based exceptions"""


class SigHup(SignalException):
    """raised on SIGHUP signal"""


class SigTerm(SignalException):
    """raised on SIGTERM signal"""
  1401. @contextlib.contextmanager
  1402. def signal_handler(sig, handler):
  1403. """
  1404. when entering context, set up signal handler <handler> for signal <sig>.
  1405. when leaving context, restore original signal handler.
  1406. <sig> can bei either a str when giving a signal.SIGXXX attribute name (it
  1407. won't crash if the attribute name does not exist as some names are platform
  1408. specific) or a int, when giving a signal number.
  1409. <handler> is any handler value as accepted by the signal.signal(sig, handler).
  1410. """
  1411. if isinstance(sig, str):
  1412. sig = getattr(signal, sig, None)
  1413. if sig is not None:
  1414. orig_handler = signal.signal(sig, handler)
  1415. try:
  1416. yield
  1417. finally:
  1418. if sig is not None:
  1419. signal.signal(sig, orig_handler)
  1420. def raising_signal_handler(exc_cls):
  1421. def handler(sig_no, frame):
  1422. # setting SIG_IGN avoids that an incoming second signal of this
  1423. # kind would raise a 2nd exception while we still process the
  1424. # exception handler for exc_cls for the 1st signal.
  1425. signal.signal(sig_no, signal.SIG_IGN)
  1426. raise exc_cls
  1427. return handler
  1428. def swidth_slice(string, max_width):
  1429. """
  1430. Return a slice of *max_width* cells from *string*.
  1431. Negative *max_width* means from the end of string.
  1432. *max_width* is in units of character cells (or "columns").
  1433. Latin characters are usually one cell wide, many CJK characters are two cells wide.
  1434. """
  1435. from .platform import swidth
  1436. reverse = max_width < 0
  1437. max_width = abs(max_width)
  1438. if reverse:
  1439. string = reversed(string)
  1440. current_swidth = 0
  1441. result = []
  1442. for character in string:
  1443. current_swidth += swidth(character)
  1444. if current_swidth > max_width:
  1445. break
  1446. result.append(character)
  1447. if reverse:
  1448. result.reverse()
  1449. return ''.join(result)