# helpers.py
  1. import argparse
  2. from binascii import hexlify
  3. from collections import namedtuple, deque
  4. from functools import wraps, partial
  5. import grp
  6. import hashlib
  7. from itertools import islice
  8. import os
  9. import stat
  10. import textwrap
  11. import pwd
  12. import re
  13. from shutil import get_terminal_size
  14. import sys
  15. from string import Formatter
  16. import platform
  17. import time
  18. import unicodedata
  19. import logging
  20. from .logger import create_logger
  21. logger = create_logger()
  22. from datetime import datetime, timezone, timedelta
  23. from fnmatch import translate
  24. from operator import attrgetter
  25. from . import __version__ as borg_version
  26. from . import hashindex
  27. from . import chunker
  28. from .constants import * # NOQA
  29. from . import crypto
  30. from . import shellpattern
  31. import msgpack
  32. import msgpack.fallback
  33. import socket
class Error(Exception):
    """Error base class"""
    # if we raise such an Error and it is only caught by the uppermost
    # exception handler (that exits shortly after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        # the class docstring doubles as the message template,
        # formatted with the exception's positional args
        return type(self).__doc__.format(*self.args)
class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    # request a full traceback from the top-level handler
    traceback = True
# raised when stored data fails verification (e.g. decryption/hash mismatch)
class IntegrityError(ErrorWithTraceback):
    """Data integrity error"""
# raised by check_extension_modules() when a compiled module's API version mismatches
class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""
  51. def check_extension_modules():
  52. from . import platform
  53. if hashindex.API_VERSION != 2:
  54. raise ExtensionModuleError
  55. if chunker.API_VERSION != 2:
  56. raise ExtensionModuleError
  57. if crypto.API_VERSION != 2:
  58. raise ExtensionModuleError
  59. if platform.API_VERSION != 2:
  60. raise ExtensionModuleError
class Manifest:
    """In-repository index of all archives, stored under a fixed all-zero ID."""

    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository):
        self.archives = {}      # archive name -> metadata dict (b'id', b'time', ...)
        self.config = {}
        self.key = key
        self.repository = repository

    @classmethod
    def load(cls, repository, key=None):
        """Fetch, decrypt and unpack the manifest; return (manifest, key).

        If *key* is None, it is derived from the repository data via key_factory.
        Raises ValueError on an unexpected manifest version.
        """
        from .key import key_factory
        cdata = repository.get(cls.MANIFEST_ID)
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if not m.get(b'version') == 1:
            raise ValueError('Invalid manifest version')
        # msgpack yields bytes keys; archive names are stored UTF-8 encoded
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        return manifest, key

    def write(self):
        """Serialize, encrypt and store the manifest back into the repository."""
        self.timestamp = datetime.utcnow().isoformat()
        # StableDict gives deterministic key order, so identical content
        # always packs to identical bytes (and thus an identical id)
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
  107. def prune_within(archives, within):
  108. multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
  109. try:
  110. hours = int(within[:-1]) * multiplier[within[-1]]
  111. except (KeyError, ValueError):
  112. # I don't like how this displays the original exception too:
  113. raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
  114. if hours <= 0:
  115. raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
  116. target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
  117. return [a for a in archives if a.ts > target]
  118. def prune_split(archives, pattern, n, skip=[]):
  119. last = None
  120. keep = []
  121. if n == 0:
  122. return keep
  123. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  124. period = to_localtime(a.ts).strftime(pattern)
  125. if period != last:
  126. last = period
  127. if a not in skip:
  128. keep.append(a)
  129. if len(keep) == n:
  130. break
  131. return keep
class Statistics:
    # Accumulates per-archive size counters and renders a progress line.

    def __init__(self):
        # osize/csize/usize: original, compressed and deduplicated byte
        # counts; nfiles: number of files processed
        self.osize = self.csize = self.usize = self.nfiles = 0
        self.last_progress = 0 # timestamp when last progress was shown

    def update(self, size, csize, unique):
        """Account one chunk; *unique* chunks also add to the dedup size."""
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    summary = """\
Original size Compressed size Deduplicated size
{label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""

    def __str__(self):
        return self.summary.format(stats=self, label='This archive:')

    def __repr__(self):
        return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)

    @property
    def osize_fmt(self):
        # human-readable original size
        return format_file_size(self.osize)

    @property
    def usize_fmt(self):
        # human-readable deduplicated size
        return format_file_size(self.usize)

    @property
    def csize_fmt(self):
        # human-readable compressed size
        return format_file_size(self.csize)

    def show_progress(self, item=None, final=False, stream=None, dt=None):
        """Print a single-line progress display to *stream* (default stderr).

        With *dt* set, output is rate-limited to once per *dt* seconds.
        *final* clears the line instead of printing statistics.
        """
        now = time.time()
        if dt is None or now - self.last_progress > dt:
            self.last_progress = now
            columns, lines = get_terminal_size()
            if not final:
                msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
                path = remove_surrogates(item[b'path']) if item else ''
                space = columns - len(msg)
                if space < len('...') + len(path):
                    # middle-truncate the path so the whole line fits the terminal
                    path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
                msg += "{0:<{space}}".format(path, space=space)
            else:
                msg = ' ' * columns
            # "\r" keeps rewriting the same terminal line
            print(msg, file=stream or sys.stderr, end="\r", flush=True)
  172. def get_home_dir():
  173. """Get user's home directory while preferring a possibly set HOME
  174. environment variable
  175. """
  176. # os.path.expanduser() behaves differently for '~' and '~someuser' as
  177. # parameters: when called with an explicit username, the possibly set
  178. # environment variable HOME is no longer respected. So we have to check if
  179. # it is set and only expand the user's home directory if HOME is unset.
  180. if os.environ.get('HOME', ''):
  181. return os.environ.get('HOME')
  182. else:
  183. return os.path.expanduser('~%s' % os.environ.get('USER', ''))
  184. def get_keys_dir():
  185. """Determine where to repository keys and cache"""
  186. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(get_home_dir(), '.config'))
  187. keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
  188. if not os.path.exists(keys_dir):
  189. os.makedirs(keys_dir)
  190. os.chmod(keys_dir, stat.S_IRWXU)
  191. return keys_dir
  192. def get_cache_dir():
  193. """Determine where to repository keys and cache"""
  194. xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(get_home_dir(), '.cache'))
  195. cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
  196. if not os.path.exists(cache_dir):
  197. os.makedirs(cache_dir)
  198. os.chmod(cache_dir, stat.S_IRWXU)
  199. with open(os.path.join(cache_dir, CACHE_TAG_NAME), 'wb') as fd:
  200. fd.write(CACHE_TAG_CONTENTS)
  201. fd.write(textwrap.dedent("""
  202. # This file is a cache directory tag created by Borg.
  203. # For information about cache directory tags, see:
  204. # http://www.brynosaurus.com/cachedir/
  205. """).encode('ascii'))
  206. return cache_dir
  207. def to_localtime(ts):
  208. """Convert datetime object from UTC to local time zone"""
  209. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  210. def parse_timestamp(timestamp):
  211. """Parse a ISO 8601 timestamp string"""
  212. if '.' in timestamp: # microseconds might not be present
  213. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
  214. else:
  215. return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
  216. def load_excludes(fh):
  217. """Load and parse exclude patterns from file object. Lines empty or starting with '#' after stripping whitespace on
  218. both line ends are ignored.
  219. """
  220. patterns = (line for line in (i.strip() for i in fh) if not line.startswith('#'))
  221. return [parse_pattern(pattern) for pattern in patterns if pattern]
  222. def update_excludes(args):
  223. """Merge exclude patterns from files with those on command line."""
  224. if hasattr(args, 'exclude_files') and args.exclude_files:
  225. if not hasattr(args, 'excludes') or args.excludes is None:
  226. args.excludes = []
  227. for file in args.exclude_files:
  228. args.excludes += load_excludes(file)
  229. file.close()
  230. class PatternMatcher:
  231. def __init__(self, fallback=None):
  232. self._items = []
  233. # Value to return from match function when none of the patterns match.
  234. self.fallback = fallback
  235. def empty(self):
  236. return not len(self._items)
  237. def add(self, patterns, value):
  238. """Add list of patterns to internal list. The given value is returned from the match function when one of the
  239. given patterns matches.
  240. """
  241. self._items.extend((i, value) for i in patterns)
  242. def match(self, path):
  243. for (pattern, value) in self._items:
  244. if pattern.match(path):
  245. return value
  246. return self.fallback
  247. def normalized(func):
  248. """ Decorator for the Pattern match methods, returning a wrapper that
  249. normalizes OSX paths to match the normalized pattern on OSX, and
  250. returning the original method on other platforms"""
  251. @wraps(func)
  252. def normalize_wrapper(self, path):
  253. return func(self, unicodedata.normalize("NFD", path))
  254. if sys.platform in ('darwin',):
  255. # HFS+ converts paths to a canonical form, so users shouldn't be
  256. # required to enter an exact match
  257. return normalize_wrapper
  258. else:
  259. # Windows and Unix filesystems allow different forms, so users
  260. # always have to enter an exact match
  261. return func
  262. class PatternBase:
  263. """Shared logic for inclusion/exclusion patterns.
  264. """
  265. PREFIX = NotImplemented
  266. def __init__(self, pattern):
  267. self.pattern_orig = pattern
  268. self.match_count = 0
  269. if sys.platform in ('darwin',):
  270. pattern = unicodedata.normalize("NFD", pattern)
  271. self._prepare(pattern)
  272. @normalized
  273. def match(self, path):
  274. matches = self._match(path)
  275. if matches:
  276. self.match_count += 1
  277. return matches
  278. def __repr__(self):
  279. return '%s(%s)' % (type(self), self.pattern)
  280. def __str__(self):
  281. return self.pattern_orig
  282. def _prepare(self, pattern):
  283. raise NotImplementedError
  284. def _match(self, path):
  285. raise NotImplementedError
  286. # For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
  287. # or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
  288. # separator to the end of the path before matching.
  289. class PathPrefixPattern(PatternBase):
  290. """Literal files or directories listed on the command line
  291. for some operations (e.g. extract, but not create).
  292. If a directory is specified, all paths that start with that
  293. path match as well. A trailing slash makes no difference.
  294. """
  295. PREFIX = "pp"
  296. def _prepare(self, pattern):
  297. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep
  298. def _match(self, path):
  299. return (path + os.path.sep).startswith(self.pattern)
  300. class FnmatchPattern(PatternBase):
  301. """Shell glob patterns to exclude. A trailing slash means to
  302. exclude the contents of a directory, but not the directory itself.
  303. """
  304. PREFIX = "fm"
  305. def _prepare(self, pattern):
  306. if pattern.endswith(os.path.sep):
  307. pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
  308. else:
  309. pattern = os.path.normpath(pattern) + os.path.sep + '*'
  310. self.pattern = pattern
  311. # fnmatch and re.match both cache compiled regular expressions.
  312. # Nevertheless, this is about 10 times faster.
  313. self.regex = re.compile(translate(self.pattern))
  314. def _match(self, path):
  315. return (self.regex.match(path + os.path.sep) is not None)
  316. class ShellPattern(PatternBase):
  317. """Shell glob patterns to exclude. A trailing slash means to
  318. exclude the contents of a directory, but not the directory itself.
  319. """
  320. PREFIX = "sh"
  321. def _prepare(self, pattern):
  322. sep = os.path.sep
  323. if pattern.endswith(sep):
  324. pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
  325. else:
  326. pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"
  327. self.pattern = pattern
  328. self.regex = re.compile(shellpattern.translate(self.pattern))
  329. def _match(self, path):
  330. return (self.regex.match(path + os.path.sep) is not None)
  331. class RegexPattern(PatternBase):
  332. """Regular expression to exclude.
  333. """
  334. PREFIX = "re"
  335. def _prepare(self, pattern):
  336. self.pattern = pattern
  337. self.regex = re.compile(pattern)
  338. def _match(self, path):
  339. # Normalize path separators
  340. if os.path.sep != '/':
  341. path = path.replace(os.path.sep, '/')
  342. return (self.regex.search(path) is not None)
  343. _PATTERN_STYLES = set([
  344. FnmatchPattern,
  345. PathPrefixPattern,
  346. RegexPattern,
  347. ShellPattern,
  348. ])
  349. _PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)
  350. def parse_pattern(pattern, fallback=FnmatchPattern):
  351. """Read pattern from string and return an instance of the appropriate implementation class.
  352. """
  353. if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
  354. (style, pattern) = (pattern[:2], pattern[3:])
  355. cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
  356. if cls is None:
  357. raise ValueError("Unknown pattern style: {}".format(style))
  358. else:
  359. cls = fallback
  360. return cls(pattern)
  361. def timestamp(s):
  362. """Convert a --timestamp=s argument to a datetime object"""
  363. try:
  364. # is it pointing to a file / directory?
  365. ts = os.stat(s).st_mtime
  366. return datetime.utcfromtimestamp(ts)
  367. except OSError:
  368. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  369. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  370. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  371. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  372. '%Y-%m-%d', '%Y-%j',
  373. ):
  374. try:
  375. return datetime.strptime(s, format)
  376. except ValueError:
  377. continue
  378. raise ValueError
  379. def ChunkerParams(s):
  380. if s.strip().lower() == "default":
  381. return CHUNKER_PARAMS
  382. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  383. if int(chunk_max) > 23:
  384. # do not go beyond 2**23 (8MB) chunk size now,
  385. # COMPR_BUFFER can only cope with up to this size
  386. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  387. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  388. def CompressionSpec(s):
  389. values = s.split(',')
  390. count = len(values)
  391. if count < 1:
  392. raise ValueError
  393. # --compression algo[,level]
  394. name = values[0]
  395. if name in ('none', 'lz4', ):
  396. return dict(name=name)
  397. if name in ('zlib', 'lzma', ):
  398. if count < 2:
  399. level = 6 # default compression level in py stdlib
  400. elif count == 2:
  401. level = int(values[1])
  402. if not 0 <= level <= 9:
  403. raise ValueError
  404. else:
  405. raise ValueError
  406. return dict(name=name, level=level)
  407. raise ValueError
  408. def dir_is_cachedir(path):
  409. """Determines whether the specified path is a cache directory (and
  410. therefore should potentially be excluded from the backup) according to
  411. the CACHEDIR.TAG protocol
  412. (http://www.brynosaurus.com/cachedir/spec.html).
  413. """
  414. tag_path = os.path.join(path, CACHE_TAG_NAME)
  415. try:
  416. if os.path.exists(tag_path):
  417. with open(tag_path, 'rb') as tag_file:
  418. tag_data = tag_file.read(len(CACHE_TAG_CONTENTS))
  419. if tag_data == CACHE_TAG_CONTENTS:
  420. return True
  421. except OSError:
  422. pass
  423. return False
  424. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  425. """Determines whether the specified path is excluded by being a cache
  426. directory or containing user-specified tag files. Returns a list of the
  427. paths of the tag files (either CACHEDIR.TAG or the matching
  428. user-specified files).
  429. """
  430. tag_paths = []
  431. if exclude_caches and dir_is_cachedir(path):
  432. tag_paths.append(os.path.join(path, CACHE_TAG_NAME))
  433. if exclude_if_present is not None:
  434. for tag in exclude_if_present:
  435. tag_path = os.path.join(path, tag)
  436. if os.path.isfile(tag_path):
  437. tag_paths.append(tag_path)
  438. return tag_paths
  439. def partial_format(format, mapping):
  440. """
  441. Apply format.format_map(mapping) while preserving unknown keys
  442. Does not support attribute access, indexing and ![rsa] conversions
  443. """
  444. for key, value in mapping.items():
  445. key = re.escape(key)
  446. format = re.sub(r'(?<!\{)((\{%s\})|(\{%s:[^\}]*\}))' % (key, key),
  447. lambda match: match.group(1).format_map(mapping),
  448. format)
  449. return format
  450. def format_line(format, data):
  451. # TODO: Filter out unwanted properties of str.format(), because "format" is user provided.
  452. try:
  453. return format.format(**data)
  454. except (KeyError, ValueError) as e:
  455. # this should catch format errors
  456. print('Error in lineformat: "{}" - reason "{}"'.format(format, str(e)))
  457. except Exception as e:
  458. # something unexpected, print error and raise exception
  459. print('Error in lineformat: "{}" - reason "{}"'.format(format, str(e)))
  460. raise
  461. return ''
  462. def safe_timestamp(item_timestamp_ns):
  463. try:
  464. return datetime.fromtimestamp(bigint_to_int(item_timestamp_ns) / 1e9)
  465. except OverflowError:
  466. # likely a broken file time and datetime did not want to go beyond year 9999
  467. return datetime(9999, 12, 31, 23, 59, 59)
  468. def format_time(t):
  469. """use ISO-8601 date and time format
  470. """
  471. return t.strftime('%a, %Y-%m-%d %H:%M:%S')
  472. def format_timedelta(td):
  473. """Format timedelta in a human friendly format
  474. """
  475. # Since td.total_seconds() requires python 2.7
  476. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  477. s = ts % 60
  478. m = int(ts / 60) % 60
  479. h = int(ts / 3600) % 24
  480. txt = '%.2f seconds' % s
  481. if m:
  482. txt = '%d minutes %s' % (m, txt)
  483. if h:
  484. txt = '%d hours %s' % (h, txt)
  485. if td.days:
  486. txt = '%d days %s' % (td.days, txt)
  487. return txt
  488. def format_file_size(v, precision=2, sign=False):
  489. """Format file size into a human friendly format
  490. """
  491. return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision, sign=sign)
  492. def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2, sign=False):
  493. prefix = '+' if sign and num > 0 else ''
  494. for unit in units[:-1]:
  495. if abs(round(num, precision)) < power:
  496. if isinstance(num, int):
  497. return "{}{}{}{}{}".format(prefix, num, sep, unit, suffix)
  498. else:
  499. return "{}{:3.{}f}{}{}{}".format(prefix, num, precision, sep, unit, suffix)
  500. num /= float(power)
  501. return "{}{:.{}f}{}{}{}".format(prefix, num, precision, sep, units[-1], suffix)
  502. def sizeof_fmt_iec(num, suffix='B', sep='', precision=2, sign=False):
  503. return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
  504. units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)
  505. def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2, sign=False):
  506. return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
  507. units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
  508. def format_archive(archive):
  509. return '%-36s %s [%s]' % (
  510. archive.name,
  511. format_time(to_localtime(archive.ts)),
  512. hexlify(archive.id).decode('ascii'),
  513. )
  514. def memoize(function):
  515. cache = {}
  516. def decorated_function(*args):
  517. try:
  518. return cache[args]
  519. except KeyError:
  520. val = function(*args)
  521. cache[args] = val
  522. return val
  523. return decorated_function
@memoize
def uid2user(uid, default=None):
    """Return the user name for *uid*, or *default* if unknown."""
    try:
        return pwd.getpwuid(uid).pw_name
    except KeyError:
        return default

@memoize
def user2uid(user, default=None):
    """Return the uid for *user*, or *default* if unknown.

    Falsy *user* values are returned unchanged (the `user and ...` idiom).
    """
    try:
        return user and pwd.getpwnam(user).pw_uid
    except KeyError:
        return default

@memoize
def gid2group(gid, default=None):
    """Return the group name for *gid*, or *default* if unknown."""
    try:
        return grp.getgrgid(gid).gr_name
    except KeyError:
        return default

@memoize
def group2gid(group, default=None):
    """Return the gid for *group*, or *default* if unknown.

    Falsy *group* values are returned unchanged.
    """
    try:
        return group and grp.getgrnam(group).gr_gid
    except KeyError:
        return default
  548. def posix_acl_use_stored_uid_gid(acl):
  549. """Replace the user/group field with the stored uid/gid
  550. """
  551. entries = []
  552. for entry in safe_decode(acl).split('\n'):
  553. if entry:
  554. fields = entry.split(':')
  555. if len(fields) == 4:
  556. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  557. else:
  558. entries.append(entry)
  559. return safe_encode('\n'.join(entries))
  560. def safe_decode(s, coding='utf-8', errors='surrogateescape'):
  561. """decode bytes to str, with round-tripping "invalid" bytes"""
  562. return s.decode(coding, errors)
  563. def safe_encode(s, coding='utf-8', errors='surrogateescape'):
  564. """encode str to bytes, with round-tripping "invalid" bytes"""
  565. return s.encode(coding, errors)
class Location:
    """Object representing a repository / archive location
    """
    proto = user = host = port = path = archive = None

    # borg mount's FUSE filesystem creates one level of directories from
    # the archive names. Thus, we must not accept "/" in archive names.
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    file_re = re.compile(r'(?P<proto>file)://'
                         r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # get the repo from the BORG_REPO env var and the optional archive from param.
    # if the syntax requires giving REPOSITORY (see "borg mount"),
    # use "::" to let it use the env var.
    # if REPOSITORY argument is optional, it'll automatically use the env.
    env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')

    def __init__(self, text=''):
        self.orig = text
        if not self.parse(self.orig):
            raise ValueError

    def preformat_text(self, text):
        """Format repository and archive path with common tags"""
        current_time = datetime.now()
        data = {
            'pid': os.getpid(),
            'fqdn': socket.getfqdn(),
            'hostname': socket.gethostname(),
            # NOTE(review): these invoke the datetime *classmethods* via the
            # instance, producing fresh times rather than current_time itself
            # -- presumably intentional, but worth confirming
            'now': current_time.now(),
            'utcnow': current_time.utcnow(),
            'user': uid2user(os.getuid(), os.getuid())
        }
        return format_line(text, data)

    def parse(self, text):
        """Parse *text*, falling back to BORG_REPO for the repository part.

        Returns True on success, filling in proto/user/host/port/path/archive.
        """
        text = self.preformat_text(text)
        valid = self._parse(text)
        if valid:
            return True
        # not a full location: maybe "::archive" (or empty) plus BORG_REPO
        m = self.env_re.match(text)
        if not m:
            return False
        repo = os.environ.get('BORG_REPO')
        if repo is None:
            return False
        valid = self._parse(repo)
        if not valid:
            return False
        self.archive = m.group('archive')
        return True

    def _parse(self, text):
        # try each syntax in turn: ssh:// URL, file:// URL, scp-style
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            self.path = os.path.normpath(m.group('path'))
            self.archive = m.group('archive')
            # scp syntax without a host part means a local path
            self.proto = self.host and 'ssh' or 'file'
            return True
        return False

    def __str__(self):
        items = [
            'proto=%r' % self.proto,
            'user=%r' % self.user,
            'host=%r' % self.host,
            'port=%r' % self.port,
            'path=%r' % self.path,
            'archive=%r' % self.archive,
        ]
        return ', '.join(items)

    def to_key_filename(self):
        # derive a filesystem-safe key file name from the location
        name = re.sub('[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    def canonical_path(self):
        """Return a canonical ssh:// URL (or the plain path for file repos)."""
        if self.proto == 'file':
            return self.path
        else:
            # map '~...' and relative paths into ssh URL path syntax
            if self.path and self.path.startswith('~'):
                path = '/' + self.path
            elif self.path and not self.path.startswith('/'):
                path = '/~/' + self.path
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self.host,
                                           ':{}'.format(self.port) if self.port else '',
                                           path)
  672. def location_validator(archive=None):
  673. def validator(text):
  674. try:
  675. loc = Location(text)
  676. except ValueError:
  677. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
  678. if archive is True and not loc.archive:
  679. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  680. elif archive is False and loc.archive:
  681. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  682. return loc
  683. return validator
  684. def archivename_validator():
  685. def validator(text):
  686. if '/' in text or '::' in text or not text:
  687. raise argparse.ArgumentTypeError('Invalid repository name: "%s"' % text)
  688. return text
  689. return validator
  690. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  691. for key in keys:
  692. if isinstance(d.get(key), bytes):
  693. d[key] = d[key].decode(encoding, errors)
  694. return d
  695. def remove_surrogates(s, errors='replace'):
  696. """Replace surrogates generated by fsdecode with '?'
  697. """
  698. return s.encode('utf-8', errors).decode('utf-8')
  699. _safe_re = re.compile(r'^((\.\.)?/+)+')
  700. def make_path_safe(path):
  701. """Make path safe by making it relative and local
  702. """
  703. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background.

    Classic UNIX double-fork: the first fork + setsid() detaches from the
    controlling terminal; the second fork guarantees the daemon is not a
    session leader and can never reacquire a terminal. Finally chdir('/')
    and redirect fds 0-2 to /dev/null.
    """
    pid = os.fork()
    if pid:
        os._exit(0)  # original parent exits immediately
    os.setsid()  # child becomes session leader, detaches from tty
    pid = os.fork()
    if pid:
        os._exit(0)  # first child exits; grandchild continues as daemon
    os.chdir('/')  # avoid holding any mount point busy
    # replace stdin/stdout/stderr with /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)  # lowest free fd -> becomes fd 0
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
  722. class StableDict(dict):
  723. """A dict subclass with stable items() ordering"""
  724. def items(self):
  725. return sorted(super().items())
  726. def bigint_to_int(mtime):
  727. """Convert bytearray to int
  728. """
  729. if isinstance(mtime, bytes):
  730. return int.from_bytes(mtime, 'little', signed=True)
  731. return mtime
  732. def int_to_bigint(value):
  733. """Convert integers larger than 64 bits to bytearray
  734. Smaller integers are left alone
  735. """
  736. if value.bit_length() > 63:
  737. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  738. return value
  739. def is_slow_msgpack():
  740. return msgpack.Packer is msgpack.fallback.Packer
# Answer sets recognized by yes(): negative, positive, and
# "use the default" responses (the empty string means just pressing enter).
FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
        retry_msg=None, invalid_msg=None, env_msg=None,
        falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
        default=False, retry=True, env_var_override=None, ofile=None, input=input):
    """Output <msg> (usually a question) and let the user input an answer.

    The answer is qualified against falsish, truish and defaultish as
    False, True or <default>. If it does not qualify and retry is False,
    return the default (which defaults to False); otherwise keep asking
    until the answer qualifies.

    If env_var_override is given and that variable is present in the
    environment, its contents are used as the answer as if it was typed in
    (but only once - an invalid env answer falls back to interactive input).
    On EOF instead of input, or an invalid answer without retry, the
    default is returned.

    :param msg: introducing message to output on ofile, no '\\n' is added [None]
    :param retry_msg: retry message to output on ofile, no '\\n' is added [None]
    :param false_msg: message to output before returning False [None]
    :param true_msg: message to output before returning True [None]
    :param default_msg: message to output before returning a <default> [None]
    :param invalid_msg: message to output after an invalid answer [None]
    :param env_msg: message to output when using input from env_var_override [None],
           needs 2 placeholders for answer and env var name, e.g.: "{} (from {})"
    :param falsish: sequence of answers qualifying as False
    :param truish: sequence of answers qualifying as True
    :param defaultish: sequence of answers qualifying as <default>
    :param default: default return value (defaultish answer or no-answer) [False]
    :param retry: if True and input is incorrect, retry; otherwise return default [True]
    :param env_var_override: environment variable name [None]
    :param ofile: output stream [sys.stderr]
    :param input: input function [input from builtins]
    :return: boolean answer value, True or False
    """
    # note: we do not assign sys.stderr as default above, so it is
    # really evaluated NOW, not at function definition time.
    if ofile is None:
        ofile = sys.stderr
    if default not in (True, False):
        raise ValueError("invalid default value, must be True or False")
    if msg:
        print(msg, file=ofile, end='', flush=True)
    while True:
        answer = None
        if env_var_override:
            answer = os.environ.get(env_var_override)
            # announce that the answer came from the environment, if requested
            if answer is not None and env_msg:
                print(env_msg.format(answer, env_var_override), file=ofile)
        if answer is None:
            try:
                answer = input()
            except EOFError:
                # avoid defaultish[0], defaultish could be empty
                answer = truish[0] if default else falsish[0]
        if answer in defaultish:
            if default_msg:
                print(default_msg, file=ofile)
            return default
        if answer in truish:
            if true_msg:
                print(true_msg, file=ofile)
            return True
        if answer in falsish:
            if false_msg:
                print(false_msg, file=ofile)
            return False
        # if we get here, the answer was invalid
        if invalid_msg:
            print(invalid_msg, file=ofile)
        if not retry:
            return default
        if retry_msg:
            print(retry_msg, file=ofile, end='', flush=True)
        # in case we used an environment variable and it gave an invalid answer, do not use it again:
        env_var_override = None
  818. class ProgressIndicatorPercent:
  819. def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=None):
  820. """
  821. Percentage-based progress indicator
  822. :param total: total amount of items
  823. :param step: step size in percent
  824. :param start: at which percent value to start
  825. :param same_line: if True, emit output always on same line
  826. :param msg: output message, must contain one %f placeholder for the percentage
  827. :param file: output file, default: sys.stderr
  828. """
  829. self.counter = 0 # 0 .. (total-1)
  830. self.total = total
  831. self.trigger_at = start # output next percentage value when reaching (at least) this
  832. self.step = step
  833. if file is None:
  834. file = sys.stderr
  835. self.file = file
  836. self.msg = msg
  837. self.same_line = same_line
  838. def progress(self, current=None):
  839. if current is not None:
  840. self.counter = current
  841. pct = self.counter * 100 / self.total
  842. self.counter += 1
  843. if pct >= self.trigger_at:
  844. self.trigger_at += self.step
  845. return pct
  846. def show(self, current=None):
  847. pct = self.progress(current)
  848. if pct is not None:
  849. return self.output(pct)
  850. def output(self, percent):
  851. print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n', flush=True)
  852. def finish(self):
  853. if self.same_line:
  854. print(" " * len(self.msg % 100.0), file=self.file, end='\r')
  855. class ProgressIndicatorEndless:
  856. def __init__(self, step=10, file=None):
  857. """
  858. Progress indicator (long row of dots)
  859. :param step: every Nth call, call the func
  860. :param file: output file, default: sys.stderr
  861. """
  862. self.counter = 0 # call counter
  863. self.triggered = 0 # increases 1 per trigger event
  864. self.step = step # trigger every <step> calls
  865. if file is None:
  866. file = sys.stderr
  867. self.file = file
  868. def progress(self):
  869. self.counter += 1
  870. trigger = self.counter % self.step == 0
  871. if trigger:
  872. self.triggered += 1
  873. return trigger
  874. def show(self):
  875. trigger = self.progress()
  876. if trigger:
  877. return self.output(self.triggered)
  878. def output(self, triggered):
  879. print('.', end='', file=self.file, flush=True)
  880. def finish(self):
  881. print(file=self.file)
  882. def sysinfo():
  883. info = []
  884. info.append('Platform: %s' % (' '.join(platform.uname()), ))
  885. if sys.platform.startswith('linux'):
  886. info.append('Linux: %s %s %s' % platform.linux_distribution())
  887. info.append('Borg: %s Python: %s %s' % (borg_version, platform.python_implementation(), platform.python_version()))
  888. info.append('PID: %d CWD: %s' % (os.getpid(), os.getcwd()))
  889. info.append('sys.argv: %r' % sys.argv)
  890. info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
  891. info.append('')
  892. return '\n'.join(info)
  893. def log_multi(*msgs, level=logging.INFO):
  894. """
  895. log multiple lines of text, each line by a separate logging call for cosmetic reasons
  896. each positional argument may be a single or multiple lines (separated by \n) of text.
  897. """
  898. lines = []
  899. for msg in msgs:
  900. lines.extend(msg.splitlines())
  901. for line in lines:
  902. logger.log(level, line)
class ItemFormatter:
    """Render archive items (files, dirs, links) as text lines driven by a
    ``str.format``-style template.

    Static keys (archive name/id, formatting helpers) are substituted into
    the template once up front. Per-item keys that are expensive to compute
    (sizes, chunk counts, hashes, timestamps) are registered in
    ``call_keys`` and only evaluated when the template references them
    (tracked via ``used_call_keys``).
    """

    # Keys whose value never changes between items (formatting helpers).
    FIXED_KEYS = {
        # Formatting aids
        'LF': '\n',
        'SPACE': ' ',
        'TAB': '\t',
        'CR': '\r',
        'NUL': '\0',
        'NEWLINE': os.linesep,
        'NL': os.linesep,
    }
    # Human-readable key descriptions shown by keys_help().
    KEY_DESCRIPTIONS = {
        'bpath': 'verbatim POSIX path, can contain any character except NUL',
        'path': 'path interpreted as text (might be missing non-text characters, see bpath)',
        'source': 'link target for links (identical to linktarget)',
        'extra': 'prepends {source} with " -> " for soft links and " link to " for hard links',
        'csize': 'compressed size',
        'num_chunks': 'number of chunks in this file',
        'unique_chunks': 'number of unique chunks in this file',
        'NEWLINE': 'OS dependent line separator',
        'NL': 'alias of NEWLINE',
        'NUL': 'NUL character for creating print0 / xargs -0 like ouput, see bpath',
    }
    # Display grouping for keys_help(); every available key must appear in
    # exactly one group (keys_help() asserts this).
    KEY_GROUPS = (
        ('type', 'mode', 'uid', 'gid', 'user', 'group', 'path', 'bpath', 'source', 'linktarget'),
        ('size', 'csize', 'num_chunks', 'unique_chunks'),
        ('mtime', 'ctime', 'atime', 'isomtime', 'isoctime', 'isoatime'),
        tuple(sorted(hashlib.algorithms_guaranteed)),
        ('archiveid', 'archivename', 'extra'),
        ('NEWLINE', 'NL', 'NUL', 'SPACE', 'TAB', 'CR', 'LF'),
    )

    @classmethod
    def available_keys(cls):
        """Return the list of all supported keys (computed + per-item)."""
        # a throwaway archive/item pair is enough to enumerate the keys
        class FakeArchive:
            fpr = name = ""

        fake_item = {
            b'mode': 0, b'path': '', b'user': '', b'group': '', b'mtime': 0,
            b'uid': 0, b'gid': 0,
        }
        formatter = cls(FakeArchive, "")
        keys = []
        keys.extend(formatter.call_keys.keys())
        keys.extend(formatter.get_item_data(fake_item).keys())
        return keys

    @classmethod
    def keys_help(cls):
        """Return help text listing all keys grouped, with descriptions."""
        help = []
        keys = cls.available_keys()
        for group in cls.KEY_GROUPS:
            for key in group:
                # ValueError here would mean KEY_GROUPS names an unknown key
                keys.remove(key)
                text = " - " + key
                if key in cls.KEY_DESCRIPTIONS:
                    text += ": " + cls.KEY_DESCRIPTIONS[key]
                help.append(text)
            help.append("")
        # every available key must be covered by some group
        assert not keys, str(keys)
        return "\n".join(help)

    def __init__(self, archive, format):
        """
        :param archive: archive the items belong to; must provide
            name, fpr, cache and pipeline attributes
        :param format: str.format-style template used by format_item()
        """
        self.archive = archive
        static_keys = {
            'archivename': archive.name,
            'archiveid': archive.fpr,
        }
        static_keys.update(self.FIXED_KEYS)
        # substitute the static keys into the template once, up front
        self.format = partial_format(format, static_keys)
        # all field names referenced by the template
        self.format_keys = {f[1] for f in Formatter().parse(format)}
        # computed per-item keys; evaluated lazily via used_call_keys
        self.call_keys = {
            'size': self.calculate_size,
            'csize': self.calculate_csize,
            'num_chunks': self.calculate_num_chunks,
            'unique_chunks': self.calculate_unique_chunks,
            'isomtime': partial(self.format_time, b'mtime'),
            'isoctime': partial(self.format_time, b'ctime'),
            'isoatime': partial(self.format_time, b'atime'),
            'mtime': partial(self.time, b'mtime'),
            'ctime': partial(self.time, b'ctime'),
            'atime': partial(self.time, b'atime'),
        }
        for hash_function in hashlib.algorithms_guaranteed:
            self.add_key(hash_function, partial(self.hash_item, hash_function))
        self.used_call_keys = set(self.call_keys) & self.format_keys
        self.item_data = static_keys

    def add_key(self, key, callable_with_item):
        """Register a computed key; callable_with_item(item) yields its value."""
        self.call_keys[key] = callable_with_item
        self.used_call_keys = set(self.call_keys) & self.format_keys

    def get_item_data(self, item):
        """Build the key -> value mapping for one item.

        NOTE: reuses (mutates) the shared self.item_data dict between calls.
        """
        mode = stat.filemode(item[b'mode'])
        item_type = mode[0]
        item_data = self.item_data
        source = item.get(b'source', '')
        extra = ''
        if source:
            source = remove_surrogates(source)
            if item_type == 'l':
                extra = ' -> %s' % source
            else:
                # hardlink: present type char as 'h' instead of '-'
                mode = 'h' + mode[1:]
                extra = ' link to %s' % source
        item_data['type'] = item_type
        item_data['mode'] = mode
        # fall back to numeric ids when names are unset
        item_data['user'] = item[b'user'] or item[b'uid']
        item_data['group'] = item[b'group'] or item[b'gid']
        item_data['uid'] = item[b'uid']
        item_data['gid'] = item[b'gid']
        item_data['path'] = remove_surrogates(item[b'path'])
        item_data['bpath'] = item[b'path']
        item_data['source'] = source
        item_data['linktarget'] = source
        item_data['extra'] = extra
        for key in self.used_call_keys:
            item_data[key] = self.call_keys[key](item)
        return item_data

    def format_item(self, item):
        """Render one item through the template."""
        return self.format.format_map(self.get_item_data(item))

    def calculate_num_chunks(self, item):
        return len(item.get(b'chunks', []))

    def calculate_unique_chunks(self, item):
        # refcount == 1 means the chunk exists nowhere else in the repository
        chunk_index = self.archive.cache.chunks
        return sum(1 for c in item.get(b'chunks', []) if chunk_index[c.id].refcount == 1)

    def calculate_size(self, item):
        return sum(c.size for c in item.get(b'chunks', []))

    def calculate_csize(self, item):
        return sum(c.csize for c in item.get(b'chunks', []))

    def hash_item(self, hash_function, item):
        """Return the hex digest of the item's content ('' if it has no chunks)."""
        if b'chunks' not in item:
            return ""
        hash = hashlib.new(hash_function)
        for chunk in self.archive.pipeline.fetch_many([c.id for c in item[b'chunks']]):
            hash.update(chunk)
        return hash.hexdigest()

    def format_time(self, key, item):
        # fall back to mtime when the requested timestamp is missing/zero
        return format_time(safe_timestamp(item.get(key) or item[b'mtime']))

    def time(self, key, item):
        return safe_timestamp(item.get(key) or item[b'mtime'])
  1038. class ChunkIteratorFileWrapper:
  1039. """File-like wrapper for chunk iterators"""
  1040. def __init__(self, chunk_iterator):
  1041. self.chunk_iterator = chunk_iterator
  1042. self.chunk_offset = 0
  1043. self.chunk = b''
  1044. self.exhausted = False
  1045. def _refill(self):
  1046. remaining = len(self.chunk) - self.chunk_offset
  1047. if not remaining:
  1048. try:
  1049. self.chunk = memoryview(next(self.chunk_iterator))
  1050. except StopIteration:
  1051. self.exhausted = True
  1052. return 0 # EOF
  1053. self.chunk_offset = 0
  1054. remaining = len(self.chunk)
  1055. return remaining
  1056. def _read(self, nbytes):
  1057. if not nbytes:
  1058. return b''
  1059. remaining = self._refill()
  1060. will_read = min(remaining, nbytes)
  1061. self.chunk_offset += will_read
  1062. return self.chunk[self.chunk_offset - will_read:self.chunk_offset]
  1063. def read(self, nbytes):
  1064. parts = []
  1065. while nbytes and not self.exhausted:
  1066. read_data = self._read(nbytes)
  1067. nbytes -= len(read_data)
  1068. parts.append(read_data)
  1069. return b''.join(parts)
  1070. def open_item(archive, item):
  1071. """Return file-like object for archived item (with chunks)."""
  1072. chunk_iterator = archive.pipeline.fetch_many([c.id for c in item[b'chunks']])
  1073. return ChunkIteratorFileWrapper(chunk_iterator)
  1074. def file_status(mode):
  1075. if stat.S_ISREG(mode):
  1076. return 'A'
  1077. elif stat.S_ISDIR(mode):
  1078. return 'd'
  1079. elif stat.S_ISBLK(mode):
  1080. return 'b'
  1081. elif stat.S_ISCHR(mode):
  1082. return 'c'
  1083. elif stat.S_ISLNK(mode):
  1084. return 's'
  1085. elif stat.S_ISFIFO(mode):
  1086. return 'f'
  1087. return '?'
  1088. def consume(iterator, n=None):
  1089. """Advance the iterator n-steps ahead. If n is none, consume entirely."""
  1090. # Use functions that consume iterators at C speed.
  1091. if n is None:
  1092. # feed the entire iterator into a zero-length deque
  1093. deque(iterator, maxlen=0)
  1094. else:
  1095. # advance to the empty slice starting at position n
  1096. next(islice(iterator, n, n), None)