helpers.py 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384
  1. import argparse
  2. from binascii import hexlify
  3. from collections import namedtuple
  4. import contextlib
  5. from functools import wraps
  6. import grp
  7. import os
  8. import stat
  9. import textwrap
  10. import pwd
  11. import re
  12. from shutil import get_terminal_size
  13. import sys
  14. import platform
  15. import signal
  16. import threading
  17. import time
  18. import unicodedata
  19. import io
  20. import errno
  21. import logging
  22. from .logger import create_logger
  23. logger = create_logger()
  24. from datetime import datetime, timezone, timedelta
  25. from fnmatch import translate
  26. from operator import attrgetter
  27. from . import __version__ as borg_version
  28. from . import __version_tuple__ as borg_version_tuple
  29. from . import hashindex
  30. from . import chunker
  31. from . import crypto
  32. from . import shellpattern
  33. import msgpack
  34. import msgpack.fallback
  35. import socket
# return codes returned by borg command
# when borg is killed by signal N, rc = 128 + N
EXIT_SUCCESS = 0  # everything done, no problems
EXIT_WARNING = 1  # reached normal end of operation, but there were issues
EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
class Error(Exception):
    """Error base class"""
    # NOTE: the class docstring above doubles as the user-visible message
    # template (see get_message) - do not edit it casually.
    # if we raise such an Error and it is only catched by the uppermost
    # exception handler (that exits short after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        # the subclass docstring is a str.format template filled with the
        # positional args the exception was raised with
        return type(self).__doc__.format(*self.args)
class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    # same exit code semantics as Error, but the top-level handler prints a traceback
    traceback = True
class IntegrityError(ErrorWithTraceback):
    """Data integrity error: {}"""
    # docstring is the message template; {} is filled from the raise args
class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""
    # raised by check_extension_modules() on an API version mismatch
class NoManifestError(Error):
    """Repository has no manifest."""
    # raised by Manifest.load() when the manifest object is missing
class PlaceholderError(Error):
    """Formatting Error: "{}".format({}): {}({})"""
    # raised by format_line() with (format string, data, exception name, exception text)
def check_extension_modules():
    """Raise ExtensionModuleError if any compiled extension reports an unexpected API version."""
    from . import platform
    # each binary extension exports an API_VERSION constant; a mismatch means
    # the installed extension does not fit this source version
    if hashindex.API_VERSION != 3:
        raise ExtensionModuleError
    if chunker.API_VERSION != 2:
        raise ExtensionModuleError
    if crypto.API_VERSION != 3:
        raise ExtensionModuleError
    if platform.API_VERSION != 3:
        raise ExtensionModuleError
class Manifest:
    """Archive registry plus repo-wide config, stored encrypted under a fixed object id."""

    # fixed, well-known id of the manifest object inside the repository
    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository, item_keys=None):
        from .archive import ITEM_KEYS
        self.archives = {}  # archive name -> metadata dict
        self.config = {}
        self.key = key
        self.repository = repository
        # valid archive item metadata keys; default to the statically known set
        self.item_keys = frozenset(item_keys) if item_keys is not None else ITEM_KEYS
        self.tam_verified = False

    @classmethod
    def load(cls, repository, key=None, force_tam_not_required=False):
        """Fetch, decrypt and TAM-verify the manifest; return (manifest, key).

        Raises NoManifestError if the repository has no manifest object and
        ValueError for an unsupported manifest version.
        """
        from .key import key_factory, tam_required_file, tam_required
        from .repository import Repository
        from .archive import ITEM_KEYS
        try:
            cdata = repository.get(cls.MANIFEST_ID)
        except Repository.ObjectNotFound:
            raise NoManifestError
        if not key:
            # auto-detect the key type from the encrypted manifest data
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        m, manifest.tam_verified = key.unpack_and_verify_manifest(data, force_tam_not_required=force_tam_not_required)
        manifest.id = key.id_hash(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        # valid item keys are whatever is known in the repo or every key we know
        manifest.item_keys = frozenset(m.get(b'item_keys', [])) | ITEM_KEYS
        if manifest.tam_verified:
            # keep the local security database in sync with the verified manifest
            manifest_required = manifest.config.get(b'tam_required', False)
            security_required = tam_required(repository)
            if manifest_required and not security_required:
                logger.debug('Manifest is TAM verified and says TAM is required, updating security database...')
                file = tam_required_file(repository)
                open(file, 'w').close()
            if not manifest_required and security_required:
                logger.debug('Manifest is TAM verified and says TAM is *not* required, updating security database...')
                os.unlink(tam_required_file(repository))
        return manifest, key

    def write(self):
        """Serialize, authenticate, encrypt and store the manifest in the repository."""
        if self.key.tam_required:
            self.config[b'tam_required'] = True
        self.timestamp = datetime.utcnow().isoformat()
        m = {
            'version': 1,
            # StableDict gives a deterministic serialization
            'archives': StableDict((name, StableDict(archive)) for name, archive in self.archives.items()),
            'timestamp': self.timestamp,
            'config': StableDict(self.config),
            'item_keys': tuple(sorted(self.item_keys)),
        }
        self.tam_verified = True
        data = self.key.pack_and_authenticate_metadata(m)
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data, none_compression=True))

    def list_archive_infos(self, sort_by=None, reverse=False):
        """Return a list of ArchiveInfo(name, id, ts), optionally sorted by one attribute."""
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
  143. def prune_within(archives, within):
  144. multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
  145. try:
  146. hours = int(within[:-1]) * multiplier[within[-1]]
  147. except (KeyError, ValueError):
  148. # I don't like how this displays the original exception too:
  149. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  150. if hours <= 0:
  151. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  152. target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
  153. return [a for a in archives if a.ts > target]
  154. def prune_split(archives, pattern, n, skip=[]):
  155. last = None
  156. keep = []
  157. if n == 0:
  158. return keep
  159. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  160. period = to_localtime(a.ts).strftime(pattern)
  161. if period != last:
  162. last = period
  163. if a not in skip:
  164. keep.append(a)
  165. if len(keep) == n:
  166. break
  167. return keep
class Statistics:
    """Accumulate original/compressed/deduplicated sizes and file count for an archive."""

    def __init__(self):
        # osize/csize/usize: original, compressed and deduplicated (unique) byte counts
        self.osize = self.csize = self.usize = self.nfiles = 0
        self.last_progress = 0  # timestamp when last progress was shown

    def update(self, size, csize, unique):
        """Account one chunk; unique=True means the chunk was newly stored."""
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    # format template used by __str__ (class attribute, shared by all instances)
    summary = """\
                       Original size      Compressed size    Deduplicated size
{label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""

    def __str__(self):
        return self.summary.format(stats=self, label='This archive:')

    def __repr__(self):
        return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)

    @property
    def osize_fmt(self):
        # original size, human readable
        return format_file_size(self.osize)

    @property
    def usize_fmt(self):
        # deduplicated (unique) size, human readable
        return format_file_size(self.usize)

    @property
    def csize_fmt(self):
        # compressed size, human readable
        return format_file_size(self.csize)

    def show_progress(self, item=None, final=False, stream=None, dt=None):
        """Print a single-line progress display to stream (default: stderr).

        item: current item dict (its b'path' is shown), final: clear the line,
        dt: minimum seconds between updates (None: always update).
        """
        now = time.monotonic()
        if dt is None or now - self.last_progress > dt:
            self.last_progress = now
            columns, lines = get_terminal_size()
            if not final:
                msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
                path = remove_surrogates(item[b'path']) if item else ''
                space = columns - len(msg)
                if space < 12:
                    # not enough room for stats plus path: drop the stats
                    msg = ''
                    space = columns - len(msg)
                if space >= 8:
                    # truncate the path in the middle if it does not fit
                    if space < len('...') + len(path):
                        path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
                    msg += "{0:<{space}}".format(path, space=space)
            else:
                # final update: blank out the progress line
                msg = ' ' * columns
            # \r keeps the cursor on the same line for the next update
            print(msg, file=stream or sys.stderr, end="\r", flush=True)
  212. def get_keys_dir():
  213. """Determine where to repository keys and cache"""
  214. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
  215. keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
  216. if not os.path.exists(keys_dir):
  217. os.makedirs(keys_dir)
  218. os.chmod(keys_dir, stat.S_IRWXU)
  219. return keys_dir
  220. def get_security_dir(repository_id=None):
  221. """Determine where to store local security information."""
  222. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
  223. security_dir = os.environ.get('BORG_SECURITY_DIR', os.path.join(xdg_config, 'borg', 'security'))
  224. if repository_id:
  225. security_dir = os.path.join(security_dir, repository_id)
  226. if not os.path.exists(security_dir):
  227. os.makedirs(security_dir)
  228. os.chmod(security_dir, stat.S_IRWXU)
  229. return security_dir
def get_cache_dir():
    """Determine where to store the borg cache.

    Honors $BORG_CACHE_DIR, falling back to an XDG-style cache location.
    On first use the directory is created (mode 0700) and marked with a
    CACHEDIR.TAG so backup tools can recognize and skip it.
    """
    xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
    cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
        os.chmod(cache_dir, stat.S_IRWXU)
        with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
            fd.write(textwrap.dedent("""
                Signature: 8a477f597d28d172789f06886806bc55
                # This file is a cache directory tag created by Borg.
                # For information about cache directory tags, see:
                # http://www.brynosaurus.com/cachedir/
                """).lstrip())
    return cache_dir
  245. def to_localtime(ts):
  246. """Convert datetime object from UTC to local time zone"""
  247. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  248. def parse_timestamp(timestamp):
  249. """Parse a ISO 8601 timestamp string"""
  250. if '.' in timestamp: # microseconds might not be present
  251. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  252. else:
  253. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
def parse_add_pattern(patternstr, roots, patterns):
    """Parse a pattern string and add it to roots or patterns depending on the pattern type."""
    pattern = parse_inclexcl_pattern(patternstr)
    if pattern.ptype is RootPath:
        # 'R'/'r' prefixed entries name backup roots, not match patterns
        roots.append(pattern.pattern)
    else:
        patterns.append(pattern)
def load_pattern_file(fileobj, roots, patterns):
    """Read include/exclude/root patterns from fileobj into roots and patterns lists."""
    # clean_lines strips comments/blank lines before parsing
    for patternstr in clean_lines(fileobj):
        parse_add_pattern(patternstr, roots, patterns)
def load_exclude_file(fileobj, patterns):
    """Read exclude patterns from fileobj and append them to the patterns list."""
    for patternstr in clean_lines(fileobj):
        patterns.append(parse_exclude_pattern(patternstr))
class ArgparsePatternAction(argparse.Action):
    """argparse action: parse one pattern argument into args.paths / args.patterns."""

    def __init__(self, nargs=1, **kw):
        super().__init__(nargs=nargs, **kw)

    def __call__(self, parser, args, values, option_string=None):
        # values is a one-element list because of nargs=1
        parse_add_pattern(values[0], args.paths, args.patterns)
class ArgparsePatternFileAction(argparse.Action):
    """argparse action: load patterns from the file named by the argument."""

    def __init__(self, nargs=1, **kw):
        super().__init__(nargs=nargs, **kw)

    def __call__(self, parser, args, values, option_string=None):
        """Load and parse patterns from a file.
        Lines empty or starting with '#' after stripping whitespace on both line ends are ignored.
        """
        filename = values[0]
        with open(filename) as f:
            self.parse(f, args)

    def parse(self, fobj, args):
        # overridden by ArgparseExcludeFileAction for exclude-only files
        load_pattern_file(fobj, args.paths, args.patterns)
class ArgparseExcludeFileAction(ArgparsePatternFileAction):
    """argparse action: load exclude-only patterns from the file named by the argument."""

    def parse(self, fobj, args):
        load_exclude_file(fobj, args.patterns)
  287. class PatternMatcher:
  288. def __init__(self, fallback=None):
  289. self._items = []
  290. # Value to return from match function when none of the patterns match.
  291. self.fallback = fallback
  292. def add(self, patterns, value):
  293. """Add list of patterns to internal list. The given value is returned from the match function when one of the
  294. given patterns matches.
  295. """
  296. self._items.extend((i, value) for i in patterns)
  297. def add_inclexcl(self, patterns):
  298. """Add list of patterns (of type InclExclPattern) to internal list. The patterns ptype member is returned from
  299. the match function when one of the given patterns matches.
  300. """
  301. self._items.extend(patterns)
  302. def match(self, path):
  303. for (pattern, value) in self._items:
  304. if pattern.match(path):
  305. return value
  306. return self.fallback
  307. def normalized(func):
  308. """ Decorator for the Pattern match methods, returning a wrapper that
  309. normalizes OSX paths to match the normalized pattern on OSX, and
  310. returning the original method on other platforms"""
  311. @wraps(func)
  312. def normalize_wrapper(self, path):
  313. return func(self, unicodedata.normalize("NFD", path))
  314. if sys.platform in ('darwin',):
  315. # HFS+ converts paths to a canonical form, so users shouldn't be
  316. # required to enter an exact match
  317. return normalize_wrapper
  318. else:
  319. # Windows and Unix filesystems allow different forms, so users
  320. # always have to enter an exact match
  321. return func
class PatternBase:
    """Shared logic for inclusion/exclusion patterns.
    """
    # two-letter command-line style prefix (e.g. "fm"); set by each subclass
    PREFIX = NotImplemented

    def __init__(self, pattern):
        self.pattern_orig = pattern  # unmodified pattern text, for __str__
        self.match_count = 0  # number of paths this pattern has matched
        if sys.platform in ('darwin',):
            # HFS+ stores paths NFD-normalized; normalize the pattern to match
            pattern = unicodedata.normalize("NFD", pattern)
        self._prepare(pattern)

    @normalized
    def match(self, path):
        """Return the (truthy) match result for path, updating match_count."""
        matches = self._match(path)
        if matches:
            self.match_count += 1
        return matches

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)

    def __str__(self):
        return self.pattern_orig

    def _prepare(self, pattern):
        # subclass hook: preprocess/compile the (possibly normalized) pattern
        raise NotImplementedError

    def _match(self, path):
        # subclass hook: test path against the prepared pattern
        raise NotImplementedError
# For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
# or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
# separator to the end of the path before matching.
class PathPrefixPattern(PatternBase):
    """Literal files or directories listed on the command line
    for some operations (e.g. extract, but not create).
    If a directory is specified, all paths that start with that
    path match as well. A trailing slash makes no difference.
    """
    PREFIX = "pp"

    def _prepare(self, pattern):
        # normalize and end with exactly one separator so that prefix
        # matching stops at path-component boundaries
        self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep

    def _match(self, path):
        return (path + os.path.sep).startswith(self.pattern)
class FnmatchPattern(PatternBase):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    PREFIX = "fm"

    def _prepare(self, pattern):
        if pattern.endswith(os.path.sep):
            # trailing slash: match contents only, not the directory itself
            pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
        else:
            pattern = os.path.normpath(pattern) + os.path.sep + '*'
        self.pattern = pattern
        # fnmatch and re.match both cache compiled regular expressions.
        # Nevertheless, this is about 10 times faster.
        self.regex = re.compile(translate(self.pattern))

    def _match(self, path):
        return (self.regex.match(path + os.path.sep) is not None)
class ShellPattern(PatternBase):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    PREFIX = "sh"

    def _prepare(self, pattern):
        sep = os.path.sep
        if pattern.endswith(sep):
            # trailing slash: match everything below, not the directory itself
            pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
        else:
            pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"
        self.pattern = pattern
        # shellpattern.translate supports ** (any number of path components)
        self.regex = re.compile(shellpattern.translate(self.pattern))

    def _match(self, path):
        return (self.regex.match(path + os.path.sep) is not None)
class RegexPattern(PatternBase):
    """Regular expression to exclude.
    """
    PREFIX = "re"

    def _prepare(self, pattern):
        self.pattern = pattern
        self.regex = re.compile(pattern)

    def _match(self, path):
        # Normalize path separators so patterns can always use '/'
        if os.path.sep != '/':
            path = path.replace(os.path.sep, '/')
        # search (not match): the regex may hit anywhere in the path
        return (self.regex.search(path) is not None)
# all available pattern implementation classes
_PATTERN_STYLES = set([
    FnmatchPattern,
    PathPrefixPattern,
    RegexPattern,
    ShellPattern,
])
# two-letter prefix ("fm", "pp", "re", "sh") -> implementation class
_PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)
# a parsed pattern plus its type: True (include), False (exclude) or RootPath
InclExclPattern = namedtuple('InclExclPattern', 'pattern ptype')
# sentinel ptype meaning: the "pattern" is a backup root path, not a matcher
RootPath = object()
def parse_pattern(pattern, fallback=FnmatchPattern):
    """Read pattern from string and return an instance of the appropriate implementation class.
    """
    if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
        # explicit style prefix, e.g. "re:..." or "sh:..."
        (style, pattern) = (pattern[:2], pattern[3:])
        cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
        if cls is None:
            raise ValueError("Unknown pattern style: {}".format(style))
    else:
        # no recognizable prefix: use the caller-supplied default style
        cls = fallback
    return cls(pattern)
def parse_exclude_pattern(pattern, fallback=FnmatchPattern):
    """Read pattern from string and return an instance of the appropriate implementation class.

    The result is wrapped as an InclExclPattern with ptype=False (exclude).
    """
    epattern = parse_pattern(pattern, fallback)
    return InclExclPattern(epattern, False)
def parse_inclexcl_pattern(pattern, fallback=ShellPattern):
    """Read pattern from string and return a InclExclPattern object."""
    # first character selects the pattern type:
    #   '-' exclude, '+' include, 'R'/'r' backup root path
    type_prefix_map = {
        '-': False,
        '+': True,
        'R': RootPath,
        'r': RootPath,
    }
    try:
        ptype = type_prefix_map[pattern[0]]
        pattern = pattern[1:].lstrip()
        if not pattern:
            raise ValueError("Missing pattern!")
    except (IndexError, KeyError, ValueError):
        # IndexError: empty string; KeyError: unknown prefix; ValueError: prefix only
        raise argparse.ArgumentTypeError("Unable to parse pattern: {}".format(pattern))
    if ptype is RootPath:
        # root paths are kept as plain strings, not compiled into matchers
        pobj = pattern
    else:
        pobj = parse_pattern(pattern, fallback)
    return InclExclPattern(pobj, ptype)
  448. def timestamp(s):
  449. """Convert a --timestamp=s argument to a datetime object"""
  450. try:
  451. # is it pointing to a file / directory?
  452. ts = os.stat(s).st_mtime
  453. return datetime.utcfromtimestamp(ts)
  454. except OSError:
  455. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  456. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  457. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  458. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  459. '%Y-%m-%d', '%Y-%j',
  460. ):
  461. try:
  462. return datetime.strptime(s, format)
  463. except ValueError:
  464. continue
  465. raise ValueError
  466. def ChunkerParams(s):
  467. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  468. if int(chunk_max) > 23:
  469. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  470. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  471. def CompressionSpec(s):
  472. values = s.split(',')
  473. count = len(values)
  474. if count < 1:
  475. raise ValueError
  476. # --compression algo[,level]
  477. name = values[0]
  478. if name in ('none', 'lz4', ):
  479. return dict(name=name)
  480. if name in ('zlib', 'lzma', ):
  481. if count < 2:
  482. level = 6 # default compression level in py stdlib
  483. elif count == 2:
  484. level = int(values[1])
  485. if not 0 <= level <= 9:
  486. raise ValueError
  487. else:
  488. raise ValueError
  489. return dict(name=name, level=level)
  490. raise ValueError
def PrefixSpec(s):
    """argparse type: expand placeholders (e.g. {hostname}) in a --prefix value."""
    return replace_placeholders(s)
  493. def dir_is_cachedir(path):
  494. """Determines whether the specified path is a cache directory (and
  495. therefore should potentially be excluded from the backup) according to
  496. the CACHEDIR.TAG protocol
  497. (http://www.brynosaurus.com/cachedir/spec.html).
  498. """
  499. tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
  500. tag_path = os.path.join(path, 'CACHEDIR.TAG')
  501. try:
  502. if os.path.exists(tag_path):
  503. with open(tag_path, 'rb') as tag_file:
  504. tag_data = tag_file.read(len(tag_contents))
  505. if tag_data == tag_contents:
  506. return True
  507. except OSError:
  508. pass
  509. return False
  510. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  511. """Determines whether the specified path is excluded by being a cache
  512. directory or containing user-specified tag files. Returns a list of the
  513. paths of the tag files (either CACHEDIR.TAG or the matching
  514. user-specified files).
  515. """
  516. tag_paths = []
  517. if exclude_caches and dir_is_cachedir(path):
  518. tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
  519. if exclude_if_present is not None:
  520. for tag in exclude_if_present:
  521. tag_path = os.path.join(path, tag)
  522. if os.path.isfile(tag_path):
  523. tag_paths.append(tag_path)
  524. return tag_paths
def format_line(format, data):
    """Apply str.format with the data dict, wrapping any failure in PlaceholderError."""
    try:
        return format.format(**data)
    except Exception as e:
        # report format string, data and the underlying exception to the user
        raise PlaceholderError(format, data, e.__class__.__name__, str(e))
def replace_placeholders(text):
    """Replace placeholders in text with their values."""
    current_time = datetime.now()
    data = {
        'pid': os.getpid(),
        'fqdn': socket.getfqdn(),
        'hostname': socket.gethostname(),
        # NOTE(review): .now()/.utcnow() are classmethods, so these produce
        # fresh timestamps rather than reusing current_time - presumably
        # intentional, but confirm.
        'now': current_time.now(),
        'utcnow': current_time.utcnow(),
        # fall back to the numeric uid if there is no passwd entry
        'user': uid2user(os.getuid(), os.getuid()),
        'borgversion': borg_version,
        'borgmajor': '%d' % borg_version_tuple[:1],
        'borgminor': '%d.%d' % borg_version_tuple[:2],
        'borgpatch': '%d.%d.%d' % borg_version_tuple[:3],
    }
    return format_line(text, data)
def safe_timestamp(item_timestamp_ns):
    """Convert a nanosecond timestamp (bigint-encoded) to a datetime, clamping overflow."""
    try:
        return datetime.fromtimestamp(bigint_to_int(item_timestamp_ns) / 1e9)
    except OverflowError:
        # likely a broken file time and datetime did not want to go beyond year 9999
        return datetime(9999, 12, 31, 23, 59, 59)
  552. def format_time(t):
  553. """use ISO-8601 date and time format
  554. """
  555. return t.strftime('%a, %Y-%m-%d %H:%M:%S')
  556. def format_timedelta(td):
  557. """Format timedelta in a human friendly format
  558. """
  559. # Since td.total_seconds() requires python 2.7
  560. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  561. s = ts % 60
  562. m = int(ts / 60) % 60
  563. h = int(ts / 3600) % 24
  564. txt = '%.2f seconds' % s
  565. if m:
  566. txt = '%d minutes %s' % (m, txt)
  567. if h:
  568. txt = '%d hours %s' % (h, txt)
  569. if td.days:
  570. txt = '%d days %s' % (td.days, txt)
  571. return txt
def format_file_size(v, precision=2):
    """Format file size into a human friendly format

    Uses decimal (power-of-1000) units with a space separator, e.g. '1.50 MB'.
    """
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision)
def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2):
    """Scale num into the largest fitting unit and format it.

    units: list of unit prefixes, smallest first (last one is used unscaled
    as the final fallback); power: scaling factor between adjacent units.
    Both must be provided by the caller (see sizeof_fmt_iec / _decimal);
    the None defaults exist only to make all keyword args optional-looking.
    """
    for unit in units[:-1]:
        if abs(round(num, precision)) < power:
            if isinstance(num, int):
                # exact integer below the first scaling: no decimals needed
                return "{}{}{}{}".format(num, sep, unit, suffix)
            else:
                return "{:3.{}f}{}{}{}".format(num, precision, sep, unit, suffix)
        num /= float(power)
    # fell through all smaller units: use the largest one
    return "{:.{}f}{}{}{}".format(num, precision, sep, units[-1], suffix)
def sizeof_fmt_iec(num, suffix='B', sep='', precision=2):
    """Format num using binary (power-of-1024, IEC 'Ki'/'Mi'/...) units."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)
def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2):
    """Format num using decimal (power-of-1000, SI 'k'/'M'/...) units."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
def format_archive(archive):
    """Format an archive (with .name and .ts) as 'name  <local time>' for listings."""
    return '%-36s %s' % (archive.name, format_time(to_localtime(archive.ts)))
  591. def memoize(function):
  592. cache = {}
  593. def decorated_function(*args):
  594. try:
  595. return cache[args]
  596. except KeyError:
  597. val = function(*args)
  598. cache[args] = val
  599. return val
  600. return decorated_function
class Buffer:
    """
    provide a thread-local buffer
    """

    def __init__(self, allocator, size=4096, limit=None):
        """
        Initialize the buffer: use allocator(size) call to allocate a buffer.
        Optionally, set the upper <limit> for the buffer size.
        """
        assert callable(allocator), 'must give alloc(size) function as first param'
        assert limit is None or size <= limit, 'initial size must be <= limit'
        # each thread sees its own .buffer attribute
        self._thread_local = threading.local()
        self.allocator = allocator
        self.limit = limit
        self.resize(size, init=True)

    def __len__(self):
        # length of the current thread's buffer
        return len(self._thread_local.buffer)

    def resize(self, size, init=False):
        """
        resize the buffer - to avoid frequent reallocation, we usually always grow (if needed).
        giving init=True it is possible to first-time initialize or shrink the buffer.
        if a buffer size beyond the limit is requested, raise ValueError.
        """
        size = int(size)
        if self.limit is not None and size > self.limit:
            raise ValueError('Requested buffer size %d is above the limit of %d.' % (size, self.limit))
        if init or len(self) < size:
            self._thread_local.buffer = self.allocator(size)

    def get(self, size=None, init=False):
        """
        return a buffer of at least the requested size (None: any current size).
        init=True can be given to trigger shrinking of the buffer to the given size.
        """
        if size is not None:
            self.resize(size, init)
        return self._thread_local.buffer
@memoize
def uid2user(uid, default=None):
    """Return the user name for uid, or default if there is no passwd entry."""
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return default
@memoize
def user2uid(user, default=None):
    """Return the uid for user name, or default if there is no passwd entry.

    A falsy user (None, '') is returned unchanged due to short-circuiting.
    """
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return default
  649. @memoize
  650. def gid2group(gid, default=None):
  651. try:
  652. return grp.getgrgid(gid).gr_name
  653. except KeyError:
  654. return default
  655. @memoize
  656. def group2gid(group, default=None):
  657. try:
  658. return group and grp.getgrnam(group).gr_gid
  659. except KeyError:
  660. return default
  661. def posix_acl_use_stored_uid_gid(acl):
  662. """Replace the user/group field with the stored uid/gid
  663. """
  664. entries = []
  665. for entry in safe_decode(acl).split('\n'):
  666. if entry:
  667. fields = entry.split(':')
  668. if len(fields) == 4:
  669. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  670. else:
  671. entries.append(entry)
  672. return safe_encode('\n'.join(entries))
  673. def safe_decode(s, coding='utf-8', errors='surrogateescape'):
  674. """decode bytes to str, with round-tripping "invalid" bytes"""
  675. return s.decode(coding, errors)
  676. def safe_encode(s, coding='utf-8', errors='surrogateescape'):
  677. """encode str to bytes, with round-tripping "invalid" bytes"""
  678. return s.encode(coding, errors)
  679. def bin_to_hex(binary):
  680. return hexlify(binary).decode('ascii')
  681. class Location:
  682. """Object representing a repository / archive location
  683. """
  684. proto = user = host = port = path = archive = None
  685. # user must not contain "@", ":" or "/".
  686. # Quoting adduser error message:
  687. # "To avoid problems, the username should consist only of letters, digits,
  688. # underscores, periods, at signs and dashes, and not start with a dash
  689. # (as defined by IEEE Std 1003.1-2001)."
  690. # We use "@" as separator between username and hostname, so we must
  691. # disallow it within the pure username part.
  692. optional_user_re = r"""
  693. (?:(?P<user>[^@:/]+)@)?
  694. """
  695. # path must not contain :: (it ends at :: or string end), but may contain single colons.
  696. # to avoid ambiguities with other regexes, it must also not start with ":".
  697. path_re = r"""
  698. (?!:) # not starting with ":"
  699. (?P<path>([^:]|(:(?!:)))+) # any chars, but no "::"
  700. """
  701. # optional ::archive_name at the end, archive name must not contain "/".
  702. # borg mount's FUSE filesystem creates one level of directories from
  703. # the archive names and of course "/" is not valid in a directory name.
  704. optional_archive_re = r"""
  705. (?:
  706. :: # "::" as separator
  707. (?P<archive>[^/]+) # archive name must not contain "/"
  708. )?$""" # must match until the end
  709. # regexes for misc. kinds of supported location specifiers:
  710. ssh_re = re.compile(r"""
  711. (?P<proto>ssh):// # ssh://
  712. """ + optional_user_re + r""" # user@ (optional)
  713. (?P<host>[^:/]+)(?::(?P<port>\d+))? # host or host:port
  714. """ + path_re + optional_archive_re, re.VERBOSE) # path or path::archive
  715. file_re = re.compile(r"""
  716. (?P<proto>file):// # file://
  717. """ + path_re + optional_archive_re, re.VERBOSE) # path or path::archive
  718. # note: scp_re is also use for local pathes
  719. scp_re = re.compile(r"""
  720. (
  721. """ + optional_user_re + r""" # user@ (optional)
  722. (?P<host>[^:/]+): # host: (don't match / in host to disambiguate from file:)
  723. )? # user@host: part is optional
  724. """ + path_re + optional_archive_re, re.VERBOSE) # path with optional archive
  725. # get the repo from BORG_REPO env and the optional archive from param.
  726. # if the syntax requires giving REPOSITORY (see "borg mount"),
  727. # use "::" to let it use the env var.
  728. # if REPOSITORY argument is optional, it'll automatically use the env.
  729. env_re = re.compile(r""" # the repo part is fetched from BORG_REPO
  730. (?:::$) # just "::" is ok (when a pos. arg is required, no archive)
  731. | # or
  732. """ + optional_archive_re, re.VERBOSE) # archive name (optional, may be empty)
  733. def __init__(self, text=''):
  734. self.orig = text
  735. if not self.parse(self.orig):
  736. raise ValueError
  737. def parse(self, text):
  738. text = replace_placeholders(text)
  739. valid = self._parse(text)
  740. if valid:
  741. return True
  742. m = self.env_re.match(text)
  743. if not m:
  744. return False
  745. repo = os.environ.get('BORG_REPO')
  746. if repo is None:
  747. return False
  748. valid = self._parse(repo)
  749. if not valid:
  750. return False
  751. self.archive = m.group('archive')
  752. return True
  753. def _parse(self, text):
  754. def normpath_special(p):
  755. # avoid that normpath strips away our relative path hack and even makes p absolute
  756. relative = p.startswith('/./')
  757. p = os.path.normpath(p)
  758. return ('/.' + p) if relative else p
  759. m = self.ssh_re.match(text)
  760. if m:
  761. self.proto = m.group('proto')
  762. self.user = m.group('user')
  763. self.host = m.group('host')
  764. self.port = m.group('port') and int(m.group('port')) or None
  765. self.path = normpath_special(m.group('path'))
  766. self.archive = m.group('archive')
  767. return True
  768. m = self.file_re.match(text)
  769. if m:
  770. self.proto = m.group('proto')
  771. self.path = normpath_special(m.group('path'))
  772. self.archive = m.group('archive')
  773. return True
  774. m = self.scp_re.match(text)
  775. if m:
  776. self.user = m.group('user')
  777. self.host = m.group('host')
  778. self.path = normpath_special(m.group('path'))
  779. self.archive = m.group('archive')
  780. self.proto = self.host and 'ssh' or 'file'
  781. return True
  782. return False
  783. def __str__(self):
  784. items = [
  785. 'proto=%r' % self.proto,
  786. 'user=%r' % self.user,
  787. 'host=%r' % self.host,
  788. 'port=%r' % self.port,
  789. 'path=%r' % self.path,
  790. 'archive=%r' % self.archive,
  791. ]
  792. return ', '.join(items)
  793. def to_key_filename(self):
  794. name = re.sub('[^\w]', '_', self.path).strip('_')
  795. if self.proto != 'file':
  796. name = self.host + '__' + name
  797. return os.path.join(get_keys_dir(), name)
  798. def __repr__(self):
  799. return "Location(%s)" % self
  800. def canonical_path(self):
  801. if self.proto == 'file':
  802. return self.path
  803. else:
  804. if self.path and self.path.startswith('~'):
  805. path = '/' + self.path # /~/x = path x relative to home dir
  806. elif self.path and not self.path.startswith('/'):
  807. path = '/./' + self.path # /./x = path x relative to cwd
  808. else:
  809. path = self.path
  810. return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
  811. self.host,
  812. ':{}'.format(self.port) if self.port else '',
  813. path)
  814. def location_validator(archive=None):
  815. def validator(text):
  816. try:
  817. loc = Location(text)
  818. except ValueError:
  819. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
  820. if archive is True and not loc.archive:
  821. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  822. elif archive is False and loc.archive:
  823. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  824. return loc
  825. return validator
  826. def archivename_validator():
  827. def validator(text):
  828. if '/' in text or '::' in text or not text:
  829. raise argparse.ArgumentTypeError('Invalid repository name: "%s"' % text)
  830. return text
  831. return validator
  832. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  833. for key in keys:
  834. if isinstance(d.get(key), bytes):
  835. d[key] = d[key].decode(encoding, errors)
  836. return d
  837. def remove_surrogates(s, errors='replace'):
  838. """Replace surrogates generated by fsdecode with '?'
  839. """
  840. return s.encode('utf-8', errors).decode('utf-8')
  841. _safe_re = re.compile(r'^((\.\.)?/+)+')
  842. def make_path_safe(path):
  843. """Make path safe by making it relative and local
  844. """
  845. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    # classic double-fork daemonization:
    # 1st fork: parent exits, child keeps running
    pid = os.fork()
    if pid:
        os._exit(0)
    # become session leader, detaching from the controlling terminal
    os.setsid()
    # 2nd fork: the new child is not a session leader, so it can never
    # (re)acquire a controlling terminal
    pid = os.fork()
    if pid:
        os._exit(0)
    # do not keep any directory busy (would prevent unmounting)
    os.chdir('/')
    # replace stdin/stdout/stderr with /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)  # becomes fd 0 (lowest free fd)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  864. class StableDict(dict):
  865. """A dict subclass with stable items() ordering"""
  866. def items(self):
  867. return sorted(super().items())
  868. def bigint_to_int(mtime):
  869. """Convert bytearray to int
  870. """
  871. if isinstance(mtime, bytes):
  872. return int.from_bytes(mtime, 'little', signed=True)
  873. return mtime
  874. def int_to_bigint(value):
  875. """Convert integers larger than 64 bits to bytearray
  876. Smaller integers are left alone
  877. """
  878. if value.bit_length() > 63:
  879. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  880. return value
def is_slow_msgpack():
    # True when msgpack uses its pure-Python fallback Packer, i.e. the
    # C extension is not available (packing/unpacking will be slow).
    return msgpack.Packer is msgpack.fallback.Packer
# answer strings that yes() qualifies as False / True / "use the default"
# (note: '' is defaultish, so just pressing enter picks the default)
FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
        retry_msg=None, invalid_msg=None, env_msg='{} (from {})',
        falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
        default=False, retry=True, env_var_override=None, ofile=None, input=input):
    """Output <msg> (usually a question) and let user input an answer.
    Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
    If it didn't qualify and retry is False (no retries wanted), return the default [which
    defaults to False]. If retry is True let user retry answering until answer is qualified.

    If env_var_override is given and this var is present in the environment, do not ask
    the user, but just use the env var contents as answer as if it was typed in.
    Otherwise read input from stdin and proceed as normal.
    If EOF is received instead an input or an invalid input without retry possibility,
    return default.

    :param msg: introducing message to output on ofile, no \n is added [None]
    :param retry_msg: retry message to output on ofile, no \n is added [None]
    :param false_msg: message to output before returning False [None]
    :param true_msg: message to output before returning True [None]
    :param default_msg: message to output before returning a <default> [None]
    :param invalid_msg: message to output after a invalid answer was given [None]
    :param env_msg: message to output when using input from env_var_override ['{} (from {})'],
           needs to have 2 placeholders for answer and env var name
    :param falsish: sequence of answers qualifying as False
    :param truish: sequence of answers qualifying as True
    :param defaultish: sequence of answers qualifying as <default>
    :param default: default return value (defaultish answer was given or no-answer condition) [False]
    :param retry: if True and input is incorrect, retry. Otherwise return default. [True]
    :param env_var_override: environment variable name [None]
    :param ofile: output stream [sys.stderr]
    :param input: input function [input from builtins]
    :return: boolean answer value, True or False
    """
    # note: we do not assign sys.stderr as default above, so it is
    # really evaluated NOW, not at function definition time.
    if ofile is None:
        ofile = sys.stderr
    if default not in (True, False):
        raise ValueError("invalid default value, must be True or False")
    if msg:
        print(msg, file=ofile, end='', flush=True)
    while True:
        answer = None
        if env_var_override:
            # the env var (if set) replaces interactive input entirely
            answer = os.environ.get(env_var_override)
            if answer is not None and env_msg:
                print(env_msg.format(answer, env_var_override), file=ofile)
        if answer is None:
            try:
                answer = input()
            except EOFError:
                # no input available (e.g. stdin closed): behave as if the
                # answer matching <default> was typed in.
                # avoid defaultish[0], defaultish could be empty
                answer = truish[0] if default else falsish[0]
        # check defaultish first: '' is defaultish, so a bare enter -> default
        if answer in defaultish:
            if default_msg:
                print(default_msg, file=ofile)
            return default
        if answer in truish:
            if true_msg:
                print(true_msg, file=ofile)
            return True
        if answer in falsish:
            if false_msg:
                print(false_msg, file=ofile)
            return False
        # if we get here, the answer was invalid
        if invalid_msg:
            print(invalid_msg, file=ofile)
        if not retry:
            return default
        if retry_msg:
            print(retry_msg, file=ofile, end='', flush=True)
        # in case we used an environment variable and it gave an invalid answer, do not use it again:
        env_var_override = None
  958. class ProgressIndicatorPercent:
  959. def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=None):
  960. """
  961. Percentage-based progress indicator
  962. :param total: total amount of items
  963. :param step: step size in percent
  964. :param start: at which percent value to start
  965. :param same_line: if True, emit output always on same line
  966. :param msg: output message, must contain one %f placeholder for the percentage
  967. :param file: output file, default: sys.stderr
  968. """
  969. self.counter = 0 # 0 .. (total-1)
  970. self.total = total
  971. self.trigger_at = start # output next percentage value when reaching (at least) this
  972. self.step = step
  973. if file is None:
  974. file = sys.stderr
  975. self.file = file
  976. self.msg = msg
  977. self.same_line = same_line
  978. def progress(self, current=None):
  979. if current is not None:
  980. self.counter = current
  981. pct = self.counter * 100 / self.total
  982. self.counter += 1
  983. if pct >= self.trigger_at:
  984. self.trigger_at += self.step
  985. return pct
  986. def show(self, current=None):
  987. pct = self.progress(current)
  988. if pct is not None:
  989. return self.output(pct)
  990. def output(self, percent):
  991. print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n', flush=True)
  992. def finish(self):
  993. if self.same_line:
  994. print(" " * len(self.msg % 100.0), file=self.file, end='\r')
  995. class ProgressIndicatorEndless:
  996. def __init__(self, step=10, file=None):
  997. """
  998. Progress indicator (long row of dots)
  999. :param step: every Nth call, call the func
  1000. :param file: output file, default: sys.stderr
  1001. """
  1002. self.counter = 0 # call counter
  1003. self.triggered = 0 # increases 1 per trigger event
  1004. self.step = step # trigger every <step> calls
  1005. if file is None:
  1006. file = sys.stderr
  1007. self.file = file
  1008. def progress(self):
  1009. self.counter += 1
  1010. trigger = self.counter % self.step == 0
  1011. if trigger:
  1012. self.triggered += 1
  1013. return trigger
  1014. def show(self):
  1015. trigger = self.progress()
  1016. if trigger:
  1017. return self.output(self.triggered)
  1018. def output(self, triggered):
  1019. print('.', end='', file=self.file, flush=True)
  1020. def finish(self):
  1021. print(file=self.file)
  1022. def sysinfo():
  1023. info = []
  1024. info.append('Platform: %s' % (' '.join(platform.uname()), ))
  1025. if sys.platform.startswith('linux'):
  1026. info.append('Linux: %s %s %s' % platform.linux_distribution())
  1027. info.append('Borg: %s Python: %s %s' % (borg_version, platform.python_implementation(), platform.python_version()))
  1028. info.append('PID: %d CWD: %s' % (os.getpid(), os.getcwd()))
  1029. info.append('sys.argv: %r' % sys.argv)
  1030. info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
  1031. info.append('')
  1032. return '\n'.join(info)
  1033. def log_multi(*msgs, level=logging.INFO):
  1034. """
  1035. log multiple lines of text, each line by a separate logging call for cosmetic reasons
  1036. each positional argument may be a single or multiple lines (separated by newlines) of text.
  1037. """
  1038. lines = []
  1039. for msg in msgs:
  1040. lines.extend(msg.splitlines())
  1041. for line in lines:
  1042. logger.log(level, line)
  1043. class ErrorIgnoringTextIOWrapper(io.TextIOWrapper):
  1044. def read(self, n):
  1045. if not self.closed:
  1046. try:
  1047. return super().read(n)
  1048. except BrokenPipeError:
  1049. try:
  1050. super().close()
  1051. except OSError:
  1052. pass
  1053. return ''
  1054. def write(self, s):
  1055. if not self.closed:
  1056. try:
  1057. return super().write(s)
  1058. except BrokenPipeError:
  1059. try:
  1060. super().close()
  1061. except OSError:
  1062. pass
  1063. return len(s)
class SignalException(BaseException):
    """base class for all signal-based exceptions"""
    # derives from BaseException (not Exception) so that ordinary
    # `except Exception` handlers do not swallow signal-triggered exceptions
class SigHup(SignalException):
    """raised on SIGHUP signal"""
    # raised from a handler installed via raising_signal_handler(SigHup)
class SigTerm(SignalException):
    """raised on SIGTERM signal"""
    # raised from a handler installed via raising_signal_handler(SigTerm)
@contextlib.contextmanager
def signal_handler(sig, handler):
    """
    when entering context, set up signal handler <handler> for signal <sig>.
    when leaving context, restore original signal handler.

    <sig> can be either a str when giving a signal.SIGXXX attribute name (it
    won't crash if the attribute name does not exist as some names are platform
    specific) or an int, when giving a signal number.

    <handler> is any handler value as accepted by the signal.signal(sig, handler).
    """
    if isinstance(sig, str):
        # resolve the attribute name; unknown (platform-specific) names -> None,
        # which turns this context manager into a no-op
        sig = getattr(signal, sig, None)
    if sig is not None:
        orig_handler = signal.signal(sig, handler)
    try:
        yield
    finally:
        if sig is not None:
            signal.signal(sig, orig_handler)
  1089. def clean_lines(lines, lstrip=None, rstrip=None, remove_empty=True, remove_comments=True):
  1090. """
  1091. clean lines (usually read from a config file):
  1092. 1. strip whitespace (left and right), 2. remove empty lines, 3. remove comments.
  1093. note: only "pure comment lines" are supported, no support for "trailing comments".
  1094. :param lines: input line iterator (e.g. list or open text file) that gives unclean input lines
  1095. :param lstrip: lstrip call arguments or False, if lstripping is not desired
  1096. :param rstrip: rstrip call arguments or False, if rstripping is not desired
  1097. :param remove_comments: remove comment lines (lines starting with "#")
  1098. :param remove_empty: remove empty lines
  1099. :return: yields processed lines
  1100. """
  1101. for line in lines:
  1102. if lstrip is not False:
  1103. line = line.lstrip(lstrip)
  1104. if rstrip is not False:
  1105. line = line.rstrip(rstrip)
  1106. if remove_empty and not line:
  1107. continue
  1108. if remove_comments and line.startswith('#'):
  1109. continue
  1110. yield line
  1111. def raising_signal_handler(exc_cls):
  1112. def handler(sig_no, frame):
  1113. # setting SIG_IGN avoids that an incoming second signal of this
  1114. # kind would raise a 2nd exception while we still process the
  1115. # exception handler for exc_cls for the 1st signal.
  1116. signal.signal(sig_no, signal.SIG_IGN)
  1117. raise exc_cls
  1118. return handler