import argparse
from binascii import hexlify
from collections import namedtuple, deque
from functools import wraps, partial
import sys
if sys.platform != 'win32':
    import grp
    import pwd
else:
    import posixpath
import hashlib
from itertools import islice
import os
import os.path
import stat
import textwrap
import re
from string import Formatter
import platform
import time
import unicodedata
import logging

from .logger import create_logger
logger = create_logger()

from datetime import datetime, timezone, timedelta
from fnmatch import translate
from operator import attrgetter

from . import __version__ as borg_version
from . import hashindex
from . import chunker
from .constants import *  # NOQA
from . import crypto
from .compress import COMPR_BUFFER, get_compressor
from . import shellpattern
import msgpack
import msgpack.fallback
import socket


# meta dict, data bytes
_Chunk = namedtuple('_Chunk', 'meta data')


def Chunk(data, **meta):
    return _Chunk(meta, data)
class Error(Exception):
    """Error base class"""

    # if we raise such an Error and it is only caught by the uppermost
    # exception handler (that exits shortly after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def get_message(self):
        return type(self).__doc__.format(*self.args)


class ErrorWithTraceback(Error):
    """like Error, but show a traceback, too"""
    traceback = True
class InternalOSError(Error):
    """Error while accessing repository: [Errno {}] {}: {}"""

    def __init__(self, os_error):
        self.errno = os_error.errno
        self.strerror = os_error.strerror
        self.filename = os_error.filename

    def get_message(self):
        return self.__doc__.format(self.errno, self.strerror, self.filename)


class IntegrityError(ErrorWithTraceback):
    """Data integrity error"""


class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""


def check_extension_modules():
    from . import platform
    if hashindex.API_VERSION != 2:
        raise ExtensionModuleError
    if chunker.API_VERSION != 2:
        raise ExtensionModuleError
    if crypto.API_VERSION != 3:
        raise ExtensionModuleError
    if platform.API_VERSION != 3:
        raise ExtensionModuleError
class Manifest:

    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository):
        self.archives = {}
        self.config = {}
        self.key = key
        self.repository = repository

    @property
    def id_str(self):
        return bin_to_hex(self.id)

    @classmethod
    def load(cls, repository, key=None):
        from .key import key_factory
        cdata = repository.get(cls.MANIFEST_ID)
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        _, data = key.decrypt(None, cdata)
        manifest.id = key.id_hash(data)
        m = msgpack.unpackb(data)
        if m.get(b'version') != 1:
            raise ValueError('Invalid manifest version')
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        return manifest, key

    def write(self):
        self.timestamp = datetime.utcnow().isoformat()
        data = msgpack.packb(StableDict({
            'version': 1,
            'archives': self.archives,
            'timestamp': self.timestamp,
            'config': self.config,
        }))
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(Chunk(data)))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
def prune_within(archives, within):
    multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
    try:
        hours = int(within[:-1]) * multiplier[within[-1]]
    except (KeyError, ValueError):
        # suppress the chained original exception, it only adds noise here
        raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within) from None
    if hours <= 0:
        raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
    target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
    return [a for a in archives if a.ts > target]
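

# Hedged usage sketch (not part of the original module): prune_within()
# implements --keep-within; the suffix selects the unit via `multiplier`.
#   prune_within(archives, '2d')   # keep archives from the last 48 hours
#   prune_within(archives, '1H')   # keep archives from the last hour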
def prune_split(archives, pattern, n, skip=[]):
    last = None
    keep = []
    if n == 0:
        return keep
    for a in sorted(archives, key=attrgetter('ts'), reverse=True):
        period = to_localtime(a.ts).strftime(pattern)
        if period != last:
            last = period
            if a not in skip:
                keep.append(a)
                if len(keep) == n:
                    break
    return keep
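

# Hedged usage sketch: prune_split() keeps the newest archive per strftime()
# period; `skip` holds archives already kept by an earlier rule so they do not
# count against `n` here. E.g. daily retention of the last 7 days:
#   keep = prune_split(archives, '%Y-%m-%d', 7, skip=already_kept)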
def get_home_dir():
    """Get user's home directory while preferring a possibly set HOME
    environment variable
    """
    # os.path.expanduser() behaves differently for '~' and '~someuser' as
    # parameters: when called with an explicit username, the possibly set
    # environment variable HOME is no longer respected. So we have to check if
    # it is set and only expand the user's home directory if HOME is unset.
    if os.environ.get('HOME', ''):
        return os.environ.get('HOME')
    else:
        return os.path.expanduser('~%s' % os.environ.get('USER', ''))
def get_keys_dir():
    """Determine where to store repository keys"""
    xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(get_home_dir(), '.config'))
    keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
    if not os.path.exists(keys_dir):
        os.makedirs(keys_dir)
        os.chmod(keys_dir, stat.S_IRWXU)
    return keys_dir


def get_cache_dir():
    """Determine where to store the cache"""
    xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(get_home_dir(), '.cache'))
    cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
        os.chmod(cache_dir, stat.S_IRWXU)
        with open(os.path.join(cache_dir, CACHE_TAG_NAME), 'wb') as fd:
            fd.write(CACHE_TAG_CONTENTS)
            fd.write(textwrap.dedent("""
                # This file is a cache directory tag created by Borg.
                # For information about cache directory tags, see:
                #       http://www.brynosaurus.com/cachedir/
                """).encode('ascii'))
    return cache_dir
def to_localtime(ts):
    """Convert datetime object from UTC to local time zone"""
    return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])


def parse_timestamp(timestamp):
    """Parse an ISO 8601 timestamp string"""
    if '.' in timestamp:  # microseconds might not be present
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
    else:
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
def load_excludes(fh):
    """Load and parse exclude patterns from file object. Lines empty or starting with '#' after
    stripping whitespace on both line ends are ignored.
    """
    return [parse_pattern(pattern) for pattern in clean_lines(fh)]


def update_excludes(args):
    """Merge exclude patterns from files with those on command line."""
    if hasattr(args, 'exclude_files') and args.exclude_files:
        if not hasattr(args, 'excludes') or args.excludes is None:
            args.excludes = []
        for file in args.exclude_files:
            args.excludes += load_excludes(file)
            file.close()
class PatternMatcher:
    def __init__(self, fallback=None):
        self._items = []

        # Value to return from match function when none of the patterns match.
        self.fallback = fallback

    def empty(self):
        return not len(self._items)

    def add(self, patterns, value):
        """Add list of patterns to internal list. The given value is returned from the match
        function when one of the given patterns matches.
        """
        self._items.extend((i, value) for i in patterns)

    def match(self, path):
        for (pattern, value) in self._items:
            if pattern.match(path):
                return value

        return self.fallback
def normalized(func):
    """Decorator for the Pattern match methods, returning a wrapper that
    normalizes OSX paths to match the normalized pattern on OSX, and
    returning the original method on other platforms"""
    @wraps(func)
    def normalize_wrapper(self, path):
        return func(self, unicodedata.normalize("NFD", path))

    if sys.platform in ('darwin',):
        # HFS+ converts paths to a canonical form, so users shouldn't be
        # required to enter an exact match
        return normalize_wrapper
    else:
        # Windows and Unix filesystems allow different forms, so users
        # always have to enter an exact match
        return func
class PatternBase:
    """Shared logic for inclusion/exclusion patterns.
    """
    PREFIX = NotImplemented

    def __init__(self, pattern):
        self.pattern_orig = pattern
        self.match_count = 0

        if sys.platform in ('darwin',):
            pattern = unicodedata.normalize("NFD", pattern)

        self._prepare(pattern)

    @normalized
    def match(self, path):
        matches = self._match(path)

        if matches:
            self.match_count += 1

        return matches

    def __repr__(self):
        return '%s(%s)' % (type(self), self.pattern)

    def __str__(self):
        return self.pattern_orig

    def _prepare(self, pattern):
        raise NotImplementedError

    def _match(self, path):
        raise NotImplementedError


# For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
# or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
# separator to the end of the path before matching.
class PathPrefixPattern(PatternBase):
    """Literal files or directories listed on the command line
    for some operations (e.g. extract, but not create).
    If a directory is specified, all paths that start with that
    path match as well. A trailing slash makes no difference.
    """
    PREFIX = "pp"

    def _prepare(self, pattern):
        if sys.platform != 'win32':
            self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep
        else:
            self.pattern = posixpath.normpath(pattern).rstrip(posixpath.sep) + posixpath.sep

    def _match(self, path):
        if sys.platform != 'win32':
            return (path + os.path.sep).startswith(self.pattern)
        else:
            return (path + posixpath.sep).startswith(self.pattern)


class FnmatchPattern(PatternBase):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    PREFIX = "fm"

    def _prepare(self, pattern):
        if pattern.endswith(os.path.sep):
            pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
        else:
            pattern = os.path.normpath(pattern) + os.path.sep + '*'

        self.pattern = pattern

        # fnmatch and re.match both cache compiled regular expressions.
        # Nevertheless, this is about 10 times faster.
        self.regex = re.compile(translate(self.pattern))

    def _match(self, path):
        return (self.regex.match(path + os.path.sep) is not None)


class ShellPattern(PatternBase):
    """Shell glob patterns to exclude. A trailing slash means to
    exclude the contents of a directory, but not the directory itself.
    """
    PREFIX = "sh"

    def _prepare(self, pattern):
        sep = os.path.sep

        if pattern.endswith(sep):
            pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
        else:
            pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"

        self.pattern = pattern
        self.regex = re.compile(shellpattern.translate(self.pattern))

    def _match(self, path):
        return (self.regex.match(path + os.path.sep) is not None)


class RegexPattern(PatternBase):
    """Regular expression to exclude.
    """
    PREFIX = "re"

    def _prepare(self, pattern):
        self.pattern = pattern
        self.regex = re.compile(pattern)

    def _match(self, path):
        # Normalize path separators
        if os.path.sep != '/':
            path = path.replace(os.path.sep, '/')

        return (self.regex.search(path) is not None)


_PATTERN_STYLES = set([
    FnmatchPattern,
    PathPrefixPattern,
    RegexPattern,
    ShellPattern,
])
_PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)


def parse_pattern(pattern, fallback=FnmatchPattern):
    """Read pattern from string and return an instance of the appropriate implementation class.
    """
    if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
        (style, pattern) = (pattern[:2], pattern[3:])
        cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
        if cls is None:
            raise ValueError("Unknown pattern style: {}".format(style))
    else:
        cls = fallback

    return cls(pattern)
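

# Illustrative examples of the prefix syntax handled above (inputs assumed):
#   parse_pattern('fm:*.pyc')         -> FnmatchPattern
#   parse_pattern('sh:home/**/junk')  -> ShellPattern
#   parse_pattern('re:\\.tmp$')       -> RegexPattern
#   parse_pattern('pp:/etc')          -> PathPrefixPattern
#   parse_pattern('*.o')              -> FnmatchPattern (fallback, no prefix)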
def timestamp(s):
    """Convert a --timestamp=s argument to a datetime object"""
    try:
        # is it pointing to a file / directory?
        ts = os.stat(s).st_mtime
        return datetime.utcfromtimestamp(ts)
    except OSError:
        # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
        for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
                       '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
                       '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
                       '%Y-%m-%d', '%Y-%j',
                       ):
            try:
                return datetime.strptime(s, format)
            except ValueError:
                continue
        raise ValueError
def ChunkerParams(s):
    if s.strip().lower() == "default":
        return CHUNKER_PARAMS
    chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
    if int(chunk_max) > 23:
        # do not go beyond 2**23 (8MB) chunk size now,
        # COMPR_BUFFER can only cope with up to this size
        raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
    return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
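

# Illustrative example (values assumed, see .constants for the real defaults):
#   ChunkerParams('19,23,21,4095') -> (19, 23, 21, 4095)
# i.e. 2**19 min chunk size, 2**23 max chunk size, 21-bit rolling-hash mask,
# 4095-byte hash window; ChunkerParams('default') returns CHUNKER_PARAMS.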
def CompressionSpec(s):
    values = s.split(',')
    count = len(values)
    if count < 1:
        raise ValueError
    # --compression algo[,level]
    name = values[0]
    if name in ('none', 'lz4', ):
        return dict(name=name)
    if name in ('zlib', 'lzma', ):
        if count < 2:
            level = 6  # default compression level in py stdlib
        elif count == 2:
            level = int(values[1])
            if not 0 <= level <= 9:
                raise ValueError
        else:
            raise ValueError
        return dict(name=name, level=level)
    if name == 'auto':
        if 2 <= count <= 3:
            compression = ','.join(values[1:])
        else:
            raise ValueError
        return dict(name=name, spec=CompressionSpec(compression))
    raise ValueError
def dir_is_cachedir(path):
    """Determines whether the specified path is a cache directory (and
    therefore should potentially be excluded from the backup) according to
    the CACHEDIR.TAG protocol
    (http://www.brynosaurus.com/cachedir/spec.html).
    """
    tag_path = os.path.join(path, CACHE_TAG_NAME)
    try:
        if os.path.exists(tag_path):
            with open(tag_path, 'rb') as tag_file:
                tag_data = tag_file.read(len(CACHE_TAG_CONTENTS))
                if tag_data == CACHE_TAG_CONTENTS:
                    return True
    except OSError:
        pass
    return False


def dir_is_tagged(path, exclude_caches, exclude_if_present):
    """Determines whether the specified path is excluded by being a cache
    directory or containing user-specified tag files. Returns a list of the
    paths of the tag files (either CACHEDIR.TAG or the matching
    user-specified files).
    """
    tag_paths = []
    if exclude_caches and dir_is_cachedir(path):
        tag_paths.append(os.path.join(path, CACHE_TAG_NAME))
    if exclude_if_present is not None:
        for tag in exclude_if_present:
            tag_path = os.path.join(path, tag)
            if os.path.isfile(tag_path):
                tag_paths.append(tag_path)
    return tag_paths
def partial_format(format, mapping):
    """
    Apply format.format_map(mapping) while preserving unknown keys

    Does not support attribute access, indexing and ![rsa] conversions
    """
    for key, value in mapping.items():
        key = re.escape(key)
        format = re.sub(r'(?<!\{)((\{%s\})|(\{%s:[^\}]*\}))' % (key, key),
                        lambda match: match.group(1).format_map(mapping),
                        format)
    return format
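

# Illustrative example: placeholders missing from `mapping` survive verbatim,
# so a later format pass can fill them in:
#   partial_format('{hostname}-{unknown}', {'hostname': 'foo'}) == 'foo-{unknown}'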
def format_line(format, data):
    # TODO: Filter out unwanted properties of str.format(), because "format" is user provided.
    try:
        return format.format(**data)
    except (KeyError, ValueError) as e:
        # this should catch format errors
        print('Error in lineformat: "{}" - reason "{}"'.format(format, str(e)))
    except Exception as e:
        # something unexpected, print error and raise exception
        print('Error in lineformat: "{}" - reason "{}"'.format(format, str(e)))
        raise
    return ''
def safe_timestamp(item_timestamp_ns):
    try:
        return datetime.fromtimestamp(bigint_to_int(item_timestamp_ns) / 1e9)
    except OverflowError:
        # likely a broken file time and datetime did not want to go beyond year 9999
        return datetime(9999, 12, 31, 23, 59, 59)


def format_time(t):
    """use an ISO-8601-like date and time format (prefixed with the abbreviated day name)
    """
    return t.strftime('%a, %Y-%m-%d %H:%M:%S')


def format_timedelta(td):
    """Format timedelta in a human friendly format
    """
    # Since td.total_seconds() requires python 2.7
    ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    s = ts % 60
    m = int(ts / 60) % 60
    h = int(ts / 3600) % 24
    txt = '%.2f seconds' % s
    if m:
        txt = '%d minutes %s' % (m, txt)
    if h:
        txt = '%d hours %s' % (h, txt)
    if td.days:
        txt = '%d days %s' % (td.days, txt)
    return txt
def format_file_size(v, precision=2, sign=False):
    """Format file size into a human friendly format
    """
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision, sign=sign)


def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2, sign=False):
    prefix = '+' if sign and num > 0 else ''

    for unit in units[:-1]:
        if abs(round(num, precision)) < power:
            if isinstance(num, int):
                return "{}{}{}{}{}".format(prefix, num, sep, unit, suffix)
            else:
                return "{}{:3.{}f}{}{}{}".format(prefix, num, precision, sep, unit, suffix)
        num /= float(power)
    return "{}{:.{}f}{}{}{}".format(prefix, num, precision, sep, units[-1], suffix)


def sizeof_fmt_iec(num, suffix='B', sep='', precision=2, sign=False):
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
                      units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)


def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2, sign=False):
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, sign=sign,
                      units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
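

# Illustrative examples of the two unit systems:
#   format_file_size(1234567)         == '1.23 MB'    (decimal, power 1000)
#   sizeof_fmt_iec(1234567, sep=' ')  == '1.18 MiB'   (binary, power 1024)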
def format_archive(archive):
    return '%-36s %s [%s]' % (
        archive.name,
        format_time(to_localtime(archive.ts)),
        bin_to_hex(archive.id),
    )


def memoize(function):
    cache = {}

    def decorated_function(*args):
        try:
            return cache[args]
        except KeyError:
            val = function(*args)
            cache[args] = val
            return val
    return decorated_function
@memoize
def uid2user(uid, default=None):
    try:
        if sys.platform != 'win32':
            return pwd.getpwuid(uid).pw_name
        else:
            return os.getlogin()
    except KeyError:
        return default


@memoize
def user2uid(user, default=None):
    try:
        if sys.platform != 'win32':
            return user and pwd.getpwnam(user).pw_uid
        else:
            return user and 0
    except KeyError:
        return default


@memoize
def gid2group(gid, default=None):
    try:
        if sys.platform != 'win32':
            return grp.getgrgid(gid).gr_name
        else:
            return ''
    except KeyError:
        return default


@memoize
def group2gid(group, default=None):
    if sys.platform != 'win32':
        if group == '':
            return 0  # From windows
        try:
            return group and grp.getgrnam(group).gr_gid
        except KeyError:
            return default
    else:
        return 0


def getuid():
    if sys.platform != 'win32':
        return os.getuid()
    else:
        return 0
def posix_acl_use_stored_uid_gid(acl):
    """Replace the user/group field with the stored uid/gid
    """
    entries = []
    for entry in safe_decode(acl).split('\n'):
        if entry:
            fields = entry.split(':')
            if len(fields) == 4:
                entries.append(':'.join([fields[0], fields[3], fields[2]]))
            else:
                entries.append(entry)
    return safe_encode('\n'.join(entries))


def safe_decode(s, coding='utf-8', errors='surrogateescape'):
    """decode bytes to str, with round-tripping "invalid" bytes"""
    if s is None:
        return None
    return s.decode(coding, errors)


def safe_encode(s, coding='utf-8', errors='surrogateescape'):
    """encode str to bytes, with round-tripping "invalid" bytes"""
    if s is None:
        return None
    return s.encode(coding, errors)


def bin_to_hex(binary):
    return hexlify(binary).decode('ascii')
class Location:
    """Object representing a repository / archive location
    """
    proto = user = host = port = path = archive = None

    # borg mount's FUSE filesystem creates one level of directories from
    # the archive names. Thus, we must not accept "/" in archive names.
    ssh_re = re.compile(r'(?P<proto>ssh)://(?:(?P<user>[^@]+)@)?'
                        r'(?P<host>[^:/#]+)(?::(?P<port>\d+))?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    file_re = None
    if sys.platform != 'win32':
        file_re = re.compile(r'(?P<proto>file)://'
                             r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    else:
        file_re = re.compile(r'((?P<proto>file)://)?'
                             r'(?P<drive>[a-zA-Z])?:[\\/](?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    scp_re = re.compile(r'((?:(?P<user>[^@]+)@)?(?P<host>[^:/]+):)?'
                        r'(?P<path>[^:]+)(?:::(?P<archive>[^/]+))?$')
    # get the repo from the BORG_REPO env var and the optional archive from the param.
    # if the syntax requires giving REPOSITORY (see "borg mount"),
    # use "::" to let it use the env var.
    # if the REPOSITORY argument is optional, it'll automatically use the env var.
    env_re = re.compile(r'(?:::(?P<archive>[^/]+)?)?$')
    def __init__(self, text=''):
        self.orig = text
        if not self.parse(self.orig):
            raise ValueError

    def preformat_text(self, text):
        """Format repository and archive path with common tags"""
        current_time = datetime.now()
        data = {
            'pid': os.getpid(),
            'fqdn': socket.getfqdn(),
            'hostname': socket.gethostname(),
            'now': current_time.now(),
            'utcnow': current_time.utcnow(),
            'user': uid2user(getuid(), getuid())
        }
        return format_line(text, data)
    def parse(self, text):
        text = self.preformat_text(text)
        valid = self._parse(text)
        if valid:
            return True
        m = self.env_re.match(text)
        if not m:
            return False
        repo = os.environ.get('BORG_REPO')
        if repo is None:
            return False
        valid = self._parse(repo)
        if not valid:
            return False
        self.archive = m.group('archive')
        return True

    def _parse(self, text):
        if sys.platform == 'win32':
            m = self.file_re.match(text)
            if m:
                self.proto = m.group('proto')
                self.path = posixpath.normpath(m.group('drive') + ":\\" + m.group('path'))
                self.archive = m.group('archive')
                return True
        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self.host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            if sys.platform != 'win32':
                self.path = os.path.normpath(m.group('path'))
            else:
                self.path = posixpath.normpath(m.group('path'))
            self.archive = m.group('archive')
            return True
        if sys.platform != 'win32':
            m = self.file_re.match(text)
            if m:
                self.proto = m.group('proto')
                self.path = os.path.normpath(m.group('path'))
                self.archive = m.group('archive')
                return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self.host = m.group('host')
            if sys.platform != 'win32':
                self.path = os.path.normpath(m.group('path'))
            else:
                self.path = posixpath.normpath(m.group('path'))
            self.archive = m.group('archive')
            self.proto = self.host and 'ssh' or 'file'
            return True
        return False
    def __str__(self):
        items = [
            'proto=%r' % self.proto,
            'user=%r' % self.user,
            'host=%r' % self.host,
            'port=%r' % self.port,
            'path=%r' % self.path,
            'archive=%r' % self.archive,
        ]
        return ', '.join(items)

    def to_key_filename(self):
        name = re.sub(r'[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = self.host + '__' + name
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    def canonical_path(self):
        if self.proto == 'file':
            return self.path
        else:
            if self.path and self.path.startswith('~'):
                path = '/' + self.path
            elif self.path and not self.path.startswith('/'):
                path = '/~/' + self.path
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self.host,
                                           ':{}'.format(self.port) if self.port else '',
                                           path)
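

# Illustrative location strings accepted by the regexes above (values assumed):
#   'ssh://user@host:2222/path/to/repo::archive'   (ssh_re)
#   'user@host:path/to/repo'                       (scp_re, proto becomes 'ssh')
#   'file:///path/to/repo'                         (file_re)
#   '::archive'                                    (env_re, repo from BORG_REPO)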
def location_validator(archive=None):
    def validator(text):
        try:
            loc = Location(text)
        except ValueError:
            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
        if archive is True and not loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
        elif archive is False and loc.archive:
            raise argparse.ArgumentTypeError('"%s": No archive can be specified' % text)
        return loc
    return validator


def archivename_validator():
    def validator(text):
        if '/' in text or '::' in text or not text:
            raise argparse.ArgumentTypeError('Invalid archive name: "%s"' % text)
        return text
    return validator
def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
    for key in keys:
        if isinstance(d.get(key), bytes):
            d[key] = d[key].decode(encoding, errors)
    return d


def remove_surrogates(s, errors='replace'):
    """Replace surrogates generated by fsdecode with '?'
    """
    return s.encode('utf-8', errors).decode('utf-8')


_safe_re = None
if sys.platform != 'win32':
    _safe_re = re.compile(r'^((\.\.)?/+)+')
else:
    _safe_re = re.compile(r'^((\.\.)?[/\\]+)+')


def make_path_safe(path):
    """Make path safe by making it relative and local
    """
    if sys.platform != 'win32':
        return _safe_re.sub('', path) or '.'
    else:
        tail = path
        if len(path) > 2 and (path[0:2] == '//' or path[0:2] == '\\\\' or path[1] == ':'):
            drive, tail = os.path.splitdrive(path)
        tail = tail.replace('\\', '/')
        return posixpath.normpath(_safe_re.sub('', tail) or '.')
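

# Illustrative examples (non-win32 branch):
#   make_path_safe('/etc/passwd')       == 'etc/passwd'
#   make_path_safe('../../etc/passwd')  == 'etc/passwd'
#   make_path_safe('/')                 == '.'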
def daemonize():
    """Detach process from controlling terminal and run in background
    """
    pid = os.fork()
    if pid:
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        os._exit(0)
    os.chdir('/')
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
class StableDict(dict):
    """A dict subclass with stable items() ordering"""
    def items(self):
        return sorted(super().items())


def bigint_to_int(mtime):
    """Convert bytes to int
    """
    if isinstance(mtime, bytes):
        return int.from_bytes(mtime, 'little', signed=True)
    return mtime


def int_to_bigint(value):
    """Convert integers larger than 64 bits to bytes,
    smaller integers are left alone
    """
    if value.bit_length() > 63:
        return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
    return value
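

# Illustrative round trip: values wider than 63 bits become little-endian
# bytes, everything else passes through unchanged:
#   int_to_bigint(2 ** 64) == b'\x00\x00\x00\x00\x00\x00\x00\x00\x01'
#   bigint_to_int(int_to_bigint(2 ** 64)) == 2 ** 64
#   int_to_bigint(42) == 42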
def is_slow_msgpack():
    return msgpack.Packer is msgpack.fallback.Packer


FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )
def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
        retry_msg=None, invalid_msg=None, env_msg=None,
        falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
        default=False, retry=True, env_var_override=None, ofile=None, input=input):
    """
    Output <msg> (usually a question) and let the user input an answer.
    Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
    If it didn't qualify and retry_msg is None (no retries wanted),
    return the default [which defaults to False]. Otherwise, let the user retry
    answering until the answer is qualified.

    If env_var_override is given and this var is present in the environment, do not ask
    the user, but just use the env var contents as the answer as if it was typed in.
    Otherwise, read input from stdin and proceed as normal.
    If EOF is received instead of an input, or an invalid input is given without retry
    possibility, return default.

    :param msg: introducing message to output on ofile, no \n is added [None]
    :param retry_msg: retry message to output on ofile, no \n is added [None]
    :param false_msg: message to output before returning False [None]
    :param true_msg: message to output before returning True [None]
    :param default_msg: message to output before returning a <default> [None]
    :param invalid_msg: message to output after an invalid answer was given [None]
    :param env_msg: message to output when using input from env_var_override [None],
           needs to have 2 placeholders for answer and env var name, e.g.: "{} (from {})"
    :param falsish: sequence of answers qualifying as False
    :param truish: sequence of answers qualifying as True
    :param defaultish: sequence of answers qualifying as <default>
    :param default: default return value (defaultish answer was given or no-answer condition) [False]
    :param retry: if True and input is incorrect, retry. Otherwise return default. [True]
    :param env_var_override: environment variable name [None]
    :param ofile: output stream [sys.stderr]
    :param input: input function [input from builtins]
    :return: boolean answer value, True or False
    """
    # note: we do not assign sys.stderr as default above, so it is
    # really evaluated NOW, not at function definition time.
    if ofile is None:
        ofile = sys.stderr
    if default not in (True, False):
        raise ValueError("invalid default value, must be True or False")
    if msg:
        print(msg, file=ofile, end='', flush=True)
    while True:
        answer = None
        if env_var_override:
            answer = os.environ.get(env_var_override)
            if answer is not None and env_msg:
                print(env_msg.format(answer, env_var_override), file=ofile)
        if answer is None:
            try:
                answer = input()
            except EOFError:
                # avoid defaultish[0], defaultish could be empty
                answer = truish[0] if default else falsish[0]
        if answer in defaultish:
            if default_msg:
                print(default_msg, file=ofile)
            return default
        if answer in truish:
            if true_msg:
                print(true_msg, file=ofile)
            return True
        if answer in falsish:
            if false_msg:
                print(false_msg, file=ofile)
            return False
        # if we get here, the answer was invalid
        if invalid_msg:
            print(invalid_msg, file=ofile)
        if not retry:
            return default
        if retry_msg:
            print(retry_msg, file=ofile, end='', flush=True)
        # in case we used an environment variable and it gave an invalid answer, do not use it again:
        env_var_override = None
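

# Hedged usage sketch (the env var name is just an example):
#   if yes('Really delete the repository? [yN] ', default=False,
#          retry_msg='Please answer yes or no: ',
#          env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
#       ...  # proceed with deletion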
class ProgressIndicatorPercent:
    def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%"):
        """
        Percentage-based progress indicator

        :param total: total amount of items
        :param step: step size in percent
        :param start: at which percent value to start
        :param same_line: if True, emit output always on same line
        :param msg: output message, must contain one %f placeholder for the percentage
        """
        self.counter = 0  # 0 .. (total-1)
        self.total = total
        self.trigger_at = start  # output next percentage value when reaching (at least) this
        self.step = step
        self.msg = msg
        self.same_line = same_line
        self.handler = None
        self.logger = logging.getLogger('borg.output.progress')

        # If there are no handlers, set one up explicitly because the
        # terminator and propagation needs to be set. If there are,
        # they must have been set up by BORG_LOGGING_CONF: skip setup.
        if not self.logger.handlers:
            self.handler = logging.StreamHandler(stream=sys.stderr)
            self.handler.setLevel(logging.INFO)
            self.handler.terminator = '\r' if self.same_line else '\n'
            self.logger.addHandler(self.handler)
            if self.logger.level == logging.NOTSET:
                self.logger.setLevel(logging.WARN)
            self.logger.propagate = False

    def __del__(self):
        if self.handler is not None:
            self.logger.removeHandler(self.handler)
            self.handler.close()

    def progress(self, current=None):
        if current is not None:
            self.counter = current
        pct = self.counter * 100 / self.total
        self.counter += 1
        if pct >= self.trigger_at:
            self.trigger_at += self.step
            return pct

    def show(self, current=None):
        pct = self.progress(current)
        if pct is not None:
            return self.output(pct)

    def output(self, percent):
        self.logger.info(self.msg % percent)

    def finish(self):
        if self.same_line:
            self.logger.info(" " * len(self.msg % 100.0))
class ProgressIndicatorEndless:
    def __init__(self, step=10, file=None):
        """
        Progress indicator (long row of dots)

        :param step: every Nth call, call the func
        :param file: output file, default: sys.stderr
        """
        self.counter = 0  # call counter
        self.triggered = 0  # increases 1 per trigger event
        self.step = step  # trigger every <step> calls
        if file is None:
            file = sys.stderr
        self.file = file

    def progress(self):
        self.counter += 1
        trigger = self.counter % self.step == 0
        if trigger:
            self.triggered += 1
        return trigger

    def show(self):
        trigger = self.progress()
        if trigger:
            return self.output(self.triggered)

    def output(self, triggered):
        print('.', end='', file=self.file, flush=True)

    def finish(self):
        print(file=self.file)
def sysinfo():
    info = []
    info.append('Platform: %s' % (' '.join(platform.uname()), ))
    if sys.platform.startswith('linux'):
        info.append('Linux: %s %s %s' % platform.linux_distribution())
    info.append('Borg: %s  Python: %s %s' % (borg_version, platform.python_implementation(), platform.python_version()))
    info.append('PID: %d  CWD: %s' % (os.getpid(), os.getcwd()))
    info.append('sys.argv: %r' % sys.argv)
    info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
    info.append('')
    return '\n'.join(info)


def log_multi(*msgs, level=logging.INFO, logger=logger):
    """
    log multiple lines of text, each line by a separate logging call for cosmetic reasons

    each positional argument may be a single or multiple lines (separated by \n) of text.
    """
    lines = []
    for msg in msgs:
        lines.extend(msg.splitlines())
    for line in lines:
        logger.log(level, line)
class ItemFormatter:
    FIXED_KEYS = {
        # Formatting aids
        'LF': '\n',
        'SPACE': ' ',
        'TAB': '\t',
        'CR': '\r',
        'NUL': '\0',
        'NEWLINE': os.linesep,
        'NL': os.linesep,
    }
    KEY_DESCRIPTIONS = {
        'bpath': 'verbatim POSIX path, can contain any character except NUL',
        'path': 'path interpreted as text (might be missing non-text characters, see bpath)',
        'source': 'link target for links (identical to linktarget)',
        'extra': 'prepends {source} with " -> " for soft links and " link to " for hard links',
        'csize': 'compressed size',
        'num_chunks': 'number of chunks in this file',
        'unique_chunks': 'number of unique chunks in this file',
        'NEWLINE': 'OS dependent line separator',
        'NL': 'alias of NEWLINE',
        'NUL': 'NUL character for creating print0 / xargs -0 like output, see bpath',
    }
    KEY_GROUPS = (
        ('type', 'mode', 'uid', 'gid', 'user', 'group', 'path', 'bpath', 'source', 'linktarget', 'flags'),
        ('size', 'csize', 'num_chunks', 'unique_chunks'),
        ('mtime', 'ctime', 'atime', 'isomtime', 'isoctime', 'isoatime'),
        tuple(sorted(hashlib.algorithms_guaranteed)),
        ('archiveid', 'archivename', 'extra'),
        ('NEWLINE', 'NL', 'NUL', 'SPACE', 'TAB', 'CR', 'LF'),
    )
    @classmethod
    def available_keys(cls):
        class FakeArchive:
            fpr = name = ""

        fake_item = {
            b'mode': 0, b'path': '', b'user': '', b'group': '', b'mtime': 0,
            b'uid': 0, b'gid': 0,
        }
        formatter = cls(FakeArchive, "")
        keys = []
        keys.extend(formatter.call_keys.keys())
        keys.extend(formatter.get_item_data(fake_item).keys())
        return keys

    @classmethod
    def keys_help(cls):
        help = []
        keys = cls.available_keys()
        for group in cls.KEY_GROUPS:
            for key in group:
                keys.remove(key)
                text = " - " + key
                if key in cls.KEY_DESCRIPTIONS:
                    text += ": " + cls.KEY_DESCRIPTIONS[key]
                help.append(text)
            help.append("")
        assert not keys, str(keys)
        return "\n".join(help)

    def __init__(self, archive, format):
        self.archive = archive
        static_keys = {
            'archivename': archive.name,
            'archiveid': archive.fpr,
        }
        static_keys.update(self.FIXED_KEYS)
        self.format = partial_format(format, static_keys)
        self.format_keys = {f[1] for f in Formatter().parse(format)}
        self.call_keys = {
            'size': self.calculate_size,
            'csize': self.calculate_csize,
            'num_chunks': self.calculate_num_chunks,
            'unique_chunks': self.calculate_unique_chunks,
            'isomtime': partial(self.format_time, b'mtime'),
            'isoctime': partial(self.format_time, b'ctime'),
            'isoatime': partial(self.format_time, b'atime'),
            'mtime': partial(self.time, b'mtime'),
            'ctime': partial(self.time, b'ctime'),
            'atime': partial(self.time, b'atime'),
        }
        for hash_function in hashlib.algorithms_guaranteed:
            self.add_key(hash_function, partial(self.hash_item, hash_function))
        self.used_call_keys = set(self.call_keys) & self.format_keys
        self.item_data = static_keys
    def add_key(self, key, callable_with_item):
        self.call_keys[key] = callable_with_item
        self.used_call_keys = set(self.call_keys) & self.format_keys

    def get_item_data(self, item):
        mode = stat.filemode(item[b'mode'])
        item_type = mode[0]
        item_data = self.item_data

        source = item.get(b'source', '')
        extra = ''
        if source:
            source = remove_surrogates(source)
            if item_type == 'l':
                extra = ' -> %s' % source
            else:
                mode = 'h' + mode[1:]
                extra = ' link to %s' % source
        item_data['type'] = item_type
        item_data['mode'] = mode
        item_data['user'] = item[b'user'] or item[b'uid']
        item_data['group'] = item[b'group'] or item[b'gid']
        item_data['uid'] = item[b'uid']
        item_data['gid'] = item[b'gid']
        item_data['path'] = remove_surrogates(item[b'path'])
        item_data['bpath'] = item[b'path']
        item_data['source'] = source
        item_data['linktarget'] = source
        item_data['extra'] = extra
        item_data['flags'] = item.get(b'bsdflags')
        for key in self.used_call_keys:
            item_data[key] = self.call_keys[key](item)
        return item_data

    def format_item(self, item):
        return self.format.format_map(self.get_item_data(item))

    def calculate_num_chunks(self, item):
        return len(item.get(b'chunks', []))

    def calculate_unique_chunks(self, item):
        chunk_index = self.archive.cache.chunks
        return sum(1 for c in item.get(b'chunks', []) if chunk_index[c.id].refcount == 1)

    def calculate_size(self, item):
        return sum(c.size for c in item.get(b'chunks', []))

    def calculate_csize(self, item):
        return sum(c.csize for c in item.get(b'chunks', []))

    def hash_item(self, hash_function, item):
        if b'chunks' not in item:
            return ""
        hash = hashlib.new(hash_function)
        for _, data in self.archive.pipeline.fetch_many([c.id for c in item[b'chunks']]):
            hash.update(data)
        return hash.hexdigest()

    def format_time(self, key, item):
        return format_time(safe_timestamp(item.get(key) or item[b'mtime']))

    def time(self, key, item):
        return safe_timestamp(item.get(key) or item[b'mtime'])
class ChunkIteratorFileWrapper:
    """File-like wrapper for chunk iterators"""

    def __init__(self, chunk_iterator):
        self.chunk_iterator = chunk_iterator
        self.chunk_offset = 0
        self.chunk = b''
        self.exhausted = False

    def _refill(self):
        remaining = len(self.chunk) - self.chunk_offset
        if not remaining:
            try:
                chunk = next(self.chunk_iterator)
                self.chunk = memoryview(chunk.data)
            except StopIteration:
                self.exhausted = True
                return 0  # EOF
            self.chunk_offset = 0
            remaining = len(self.chunk)
        return remaining

    def _read(self, nbytes):
        if not nbytes:
            return b''
        remaining = self._refill()
        will_read = min(remaining, nbytes)
        self.chunk_offset += will_read
        return self.chunk[self.chunk_offset - will_read:self.chunk_offset]

    def read(self, nbytes):
        parts = []
        while nbytes and not self.exhausted:
            read_data = self._read(nbytes)
            nbytes -= len(read_data)
            parts.append(read_data)
        return b''.join(parts)
def open_item(archive, item):
    """Return file-like object for archived item (with chunks)."""
    chunk_iterator = archive.pipeline.fetch_many([c.id for c in item[b'chunks']])
    return ChunkIteratorFileWrapper(chunk_iterator)


def file_status(mode):
    if stat.S_ISREG(mode):
        return 'A'
    elif stat.S_ISDIR(mode):
        return 'd'
    elif stat.S_ISBLK(mode):
        return 'b'
    elif stat.S_ISCHR(mode):
        return 'c'
    elif stat.S_ISLNK(mode):
        return 's'
    elif stat.S_ISFIFO(mode):
        return 'f'
    return '?'
def consume(iterator, n=None):
    """Advance the iterator n-steps ahead. If n is none, consume entirely."""
    # Use functions that consume iterators at C speed.
    if n is None:
        # feed the entire iterator into a zero-length deque
        deque(iterator, maxlen=0)
    else:
        # advance to the empty slice starting at position n
        next(islice(iterator, n, n), None)
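

# Illustrative example:
#   it = iter(range(10))
#   consume(it, 3)    # skips 0, 1, 2
#   next(it)          # == 3
#   consume(it)       # drains the rest at C speed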
# GenericDirEntry, scandir_generic (c) 2012 Ben Hoyt
# from the python-scandir package (3-clause BSD license, just like us, so no troubles here)
# note: simplified version


class GenericDirEntry:
    __slots__ = ('name', '_scandir_path', '_path')

    def __init__(self, scandir_path, name):
        self._scandir_path = scandir_path
        self.name = name
        self._path = None

    @property
    def path(self):
        if self._path is None:
            self._path = os.path.join(self._scandir_path, self.name)
        return self._path

    def stat(self, follow_symlinks=True):
        assert not follow_symlinks
        return os.lstat(self.path)

    def _check_type(self, type):
        st = self.stat(False)
        return stat.S_IFMT(st.st_mode) == type

    def is_dir(self, follow_symlinks=True):
        assert not follow_symlinks
        return self._check_type(stat.S_IFDIR)

    def is_file(self, follow_symlinks=True):
        assert not follow_symlinks
        return self._check_type(stat.S_IFREG)

    def is_symlink(self):
        return self._check_type(stat.S_IFLNK)

    def inode(self):
        st = self.stat(False)
        return st.st_ino

    def __repr__(self):
        return '<{0}: {1!r}>'.format(self.__class__.__name__, self.path)


def scandir_generic(path='.'):
    """Like os.listdir(), but yield DirEntry objects instead of returning a list of names."""
    for name in sorted(os.listdir(path)):
        yield GenericDirEntry(path, name)

try:
    from os import scandir
except ImportError:
    try:
        # Try python-scandir on Python 3.4
        from scandir import scandir
    except ImportError:
        # If python-scandir is not installed, then use a version that is just as slow as listdir.
        scandir = scandir_generic


def scandir_inorder(path='.'):
    return sorted(scandir(path), key=lambda dirent: dirent.inode())
def clean_lines(lines, lstrip=None, rstrip=None, remove_empty=True, remove_comments=True):
    """
    clean lines (usually read from a config file):

    1. strip whitespace (left and right), 2. remove empty lines, 3. remove comments.

    note: only "pure comment lines" are supported, no support for "trailing comments".

    :param lines: input line iterator (e.g. list or open text file) that gives unclean input lines
    :param lstrip: lstrip call arguments or False, if lstripping is not desired
    :param rstrip: rstrip call arguments or False, if rstripping is not desired
    :param remove_comments: remove comment lines (lines starting with "#")
    :param remove_empty: remove empty lines
    :return: yields processed lines
    """
    for line in lines:
        if lstrip is not False:
            line = line.lstrip(lstrip)
        if rstrip is not False:
            line = line.rstrip(rstrip)
        if remove_empty and not line:
            continue
        if remove_comments and line.startswith('#'):
            continue
        yield line
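

# Illustrative example:
#   list(clean_lines(['# comment', '', '  keep me  '])) == ['keep me']
#   list(clean_lines(['# comment'], remove_comments=False)) == ['# comment']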
class CompressionDecider1:
    def __init__(self, compression, compression_files):
        """
        Initialize a CompressionDecider instance (and read config files, if needed).

        :param compression: default CompressionSpec (e.g. from --compression option)
        :param compression_files: list of compression config files (e.g. from --compression-from) or
                                  a list of other line iterators
        """
        self.compression = compression
        if not compression_files:
            self.matcher = None
        else:
            self.matcher = PatternMatcher(fallback=compression)
            for file in compression_files:
                try:
                    for line in clean_lines(file):
                        try:
                            compr_spec, fn_pattern = line.split(':', 1)
                        except ValueError:
                            # line did not have the "spec:pattern" form, skip it
                            continue
                        self.matcher.add([parse_pattern(fn_pattern)], CompressionSpec(compr_spec))
                finally:
                    if hasattr(file, 'close'):
                        file.close()

    def decide(self, path):
        if self.matcher is not None:
            return self.matcher.match(path)
        return self.compression
class CompressionDecider2:
    def __init__(self, compression):
        self.compression = compression

    def decide(self, chunk):
        # nothing fancy here yet: we either use what the metadata says or the default.
        # later, we can decide based on the chunk data also.
        # if we compress the data here to decide, we can even update the chunk data
        # and modify the metadata as desired.
        compr_spec = chunk.meta.get('compress', self.compression)
        compr_args = dict(buffer=COMPR_BUFFER)
        compr_args.update(compr_spec)
        if compr_args['name'] == 'auto':
            # we did not decide yet, use heuristic:
            compr_args, chunk = self.heuristic_lz4(compr_args, chunk)
        return compr_args, chunk

    def heuristic_lz4(self, compr_args, chunk):
        meta, data = chunk
        lz4 = get_compressor('lz4', buffer=compr_args['buffer'])
        cdata = lz4.compress(data)
        data_len = len(data)
        cdata_len = len(cdata)
        if cdata_len < data_len:
            compr_spec = compr_args['spec']
        else:
            # incompressible - we could have a special "incompressible compressor"
            # that marks such data as incompressible via compression-type metadata.
            compr_spec = CompressionSpec('none')
        compr_args.update(compr_spec)
        logger.debug("len(data) == %d, len(lz4(data)) == %d, choosing %s", data_len, cdata_len, compr_spec)
        return compr_args, Chunk(data, **meta)