helpers.py 57 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621
  1. import argparse
  2. from binascii import hexlify
  3. from collections import namedtuple
  4. import contextlib
  5. import enum
  6. from functools import wraps
  7. import grp
  8. import os
  9. import stat
  10. import textwrap
  11. import pwd
  12. import re
  13. from shutil import get_terminal_size
  14. import sys
  15. import platform
  16. import signal
  17. import threading
  18. import time
  19. import unicodedata
  20. import io
  21. import errno
  22. import logging
  23. from .logger import create_logger
  24. logger = create_logger()
  25. from datetime import datetime, timezone, timedelta
  26. from fnmatch import translate
  27. from operator import attrgetter
  28. from . import __version__ as borg_version
  29. from . import __version_tuple__ as borg_version_tuple
  30. from . import hashindex
  31. from . import chunker
  32. from . import crypto
  33. from . import shellpattern
  34. import msgpack
  35. import msgpack.fallback
  36. import socket
# never use datetime.isoformat(), it is evil. always use one of these:
# datetime.strftime(ISO_FORMAT)          # output always includes .microseconds
# datetime.strftime(ISO_FORMAT_NO_USECS) # output never includes microseconds
ISO_FORMAT_NO_USECS = '%Y-%m-%dT%H:%M:%S'
ISO_FORMAT = ISO_FORMAT_NO_USECS + '.%f'
# 20 MiB minus 41 bytes for a Repository header (because the "size" field in the Repository includes
# the header, and the total size was set to 20 MiB).
MAX_DATA_SIZE = 20971479
# MAX_OBJECT_SIZE = <20 MiB (MAX_DATA_SIZE) + 41 bytes for a Repository PUT header, which consists of
# a 1 byte tag ID, 4 byte CRC, 4 byte size and 32 bytes for the ID.
MAX_OBJECT_SIZE = MAX_DATA_SIZE + 41  # see LoggedIO.put_header_fmt.size assertion in repository module
assert MAX_OBJECT_SIZE == 20971520 == 20 * 1024 * 1024
# borg.remote read() buffer size
BUFSIZE = 10 * 1024 * 1024
# to use a safe, limited unpacker, we need to set a upper limit to the archive count in the manifest.
# this does not mean that you can always really reach that number, because it also needs to be less than
# MAX_DATA_SIZE or it will trigger the check for that.
MAX_ARCHIVES = 400000
# repo.list() / .scan() result count limit the borg client uses
LIST_SCAN_LIMIT = 10000
# return codes returned by borg command
# when borg is killed by signal N, rc = 128 + N
EXIT_SUCCESS = 0  # everything done, no problems
EXIT_WARNING = 1  # reached normal end of operation, but there were issues
EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
'''
The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
warning or error occurred during their operation. This is different from archiver.exit_code, which is only accessible
from the archiver object.
'''
# module-level exit code accumulator, updated via set_ec()
exit_code = EXIT_SUCCESS
  68. def set_ec(ec):
  69. '''
  70. Sets the exit code of the program, if an exit code higher or equal than this is set, this does nothing. This
  71. makes EXIT_ERROR override EXIT_WARNING, etc..
  72. ec: exit code to set
  73. '''
  74. global exit_code
  75. exit_code = max(exit_code, ec)
  76. return exit_code
class Error(Exception):
    """Error base class"""
    # NOTE: the class docstring doubles as the user-visible message template
    # (get_message() formats it with the constructor args), so it must stay
    # compatible with str.format().
    # if we raise such an Error and it is only catched by the uppermost
    # exception handler (that exits short after with the given exit_code),
    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
    exit_code = EXIT_ERROR
    # show a traceback?
    traceback = False

    def __init__(self, *args):
        super().__init__(*args)
        # keep the args for message formatting in get_message()
        self.args = args

    def get_message(self):
        # interpolate the constructor args into the docstring template
        return type(self).__doc__.format(*self.args)

    __str__ = get_message
# NOTE: for all Error subclasses below, the docstring doubles as the
# user-visible message template (see Error.get_message), so the docstrings
# are part of program behavior and must stay format()-compatible.
class ErrorWithTraceback(Error):
    """like Error, but show a traceback also"""
    traceback = True


class IntegrityError(ErrorWithTraceback):
    """Data integrity error: {}"""


class DecompressionError(IntegrityError):
    """Decompression error: {}"""


class ExtensionModuleError(Error):
    """The Borg binary extension modules do not seem to be properly installed"""


class NoManifestError(Error):
    """Repository has no manifest."""


class PlaceholderError(Error):
    """Formatting Error: "{}".format({}): {}({})"""


class MandatoryFeatureUnsupported(Error):
    """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""


class PythonLibcTooOld(Error):
    """FATAL: this Python was compiled for a too old (g)libc and misses required functionality."""
  108. def check_python():
  109. required_funcs = {os.stat, os.utime, os.chown}
  110. if not os.supports_follow_symlinks.issuperset(required_funcs):
  111. raise PythonLibcTooOld
  112. def check_extension_modules():
  113. from . import platform, compress
  114. if hashindex.API_VERSION != '1.0_01':
  115. raise ExtensionModuleError
  116. if chunker.API_VERSION != '1.0_01':
  117. raise ExtensionModuleError
  118. if compress.API_VERSION != '1.0_01':
  119. raise ExtensionModuleError
  120. if crypto.API_VERSION != '1.0_01':
  121. raise ExtensionModuleError
  122. if platform.API_VERSION != '1.0_01':
  123. raise ExtensionModuleError
  124. def get_limited_unpacker(kind):
  125. """return a limited Unpacker because we should not trust msgpack data received from remote"""
  126. args = dict(use_list=False, # return tuples, not lists
  127. max_bin_len=0, # not used
  128. max_ext_len=0, # not used
  129. max_buffer_size=3 * max(BUFSIZE, MAX_OBJECT_SIZE),
  130. max_str_len=MAX_OBJECT_SIZE, # a chunk or other repo object
  131. )
  132. if kind == 'server':
  133. args.update(dict(max_array_len=100, # misc. cmd tuples
  134. max_map_len=100, # misc. cmd dicts
  135. ))
  136. elif kind == 'client':
  137. args.update(dict(max_array_len=LIST_SCAN_LIMIT, # result list from repo.list() / .scan()
  138. max_map_len=100, # misc. result dicts
  139. ))
  140. elif kind == 'manifest':
  141. args.update(dict(use_list=True, # default value
  142. max_array_len=100, # ITEM_KEYS ~= 22
  143. max_map_len=MAX_ARCHIVES, # list of archives
  144. max_str_len=255, # archive name
  145. object_hook=StableDict,
  146. unicode_errors='surrogateescape',
  147. ))
  148. elif kind == 'key':
  149. args.update(dict(use_list=True, # default value
  150. max_array_len=0, # not used
  151. max_map_len=10, # EncryptedKey dict
  152. max_str_len=4000, # inner key data
  153. object_hook=StableDict,
  154. unicode_errors='surrogateescape',
  155. ))
  156. else:
  157. raise ValueError('kind must be "server", "client", "manifest" or "key"')
  158. return msgpack.Unpacker(**args)
class Manifest:
    """The repository manifest: maps archive names to archive metadata and
    carries repository-wide config (feature flags, TAM requirement) plus a
    monotonically increasing timestamp. It is stored encrypted under the
    fixed all-zero object id MANIFEST_ID.
    """

    @enum.unique
    class Operation(enum.Enum):
        # The comments here only roughly describe the scope of each feature. In the end, additions need to be
        # based on potential problems older clients could produce when accessing newer repositories and the
        # tradeofs of locking version out or still allowing access. As all older versions and their exact
        # behaviours are known when introducing new features sometimes this might not match the general descriptions
        # below.

        # The READ operation describes which features are needed to safely list and extract the archives in the
        # repository.
        READ = 'read'
        # The CHECK operation is for all operations that need either to understand every detail
        # of the repository (for consistency checks and repairs) or are seldom used functions that just
        # should use the most restrictive feature set because more fine grained compatibility tracking is
        # not needed.
        CHECK = 'check'
        # The WRITE operation is for adding archives. Features here ensure that older clients don't add archives
        # in an old format, or is used to lock out clients that for other reasons can no longer safely add new
        # archives.
        WRITE = 'write'
        # The DELETE operation is for all operations (like archive deletion) that need a 100% correct reference
        # count and the need to be able to find all (directly and indirectly) referenced chunks of a given archive.
        DELETE = 'delete'

    NO_OPERATION_CHECK = tuple()

    # feature flags (bytes) this client understands; empty = base feature set only
    SUPPORTED_REPO_FEATURES = frozenset([])

    # the manifest is stored under this fixed, well-known object id
    MANIFEST_ID = b'\0' * 32

    def __init__(self, key, repository, item_keys=None):
        from .archive import ITEM_KEYS
        self.archives = {}
        self.config = {}
        self.key = key
        self.repository = repository
        # item keys valid for this repo; defaults to everything this client knows
        self.item_keys = frozenset(item_keys) if item_keys is not None else ITEM_KEYS
        self.tam_verified = False
        self.timestamp = None

    @classmethod
    def load(cls, repository, operations, key=None, force_tam_not_required=False):
        """Fetch, decrypt, TAM-verify and unpack the manifest from *repository*.

        :param repository: repository to read the manifest from
        :param operations: iterable of Operation values to check feature compatibility for
        :param key: optional pre-created key; otherwise derived via key_factory
        :param force_tam_not_required: skip TAM requirement enforcement
        :return: (manifest, key) tuple
        :raises NoManifestError: if the repository has no manifest object
        """
        from .key import key_factory, tam_required_file, tam_required
        from .repository import Repository
        from .archive import ITEM_KEYS
        try:
            cdata = repository.get(cls.MANIFEST_ID)
        except Repository.ObjectNotFound:
            raise NoManifestError
        if not key:
            key = key_factory(repository, cdata)
        manifest = cls(key, repository)
        data = key.decrypt(None, cdata)
        m, manifest.tam_verified = key.unpack_and_verify_manifest(data, force_tam_not_required=force_tam_not_required)
        manifest.id = key.id_hash(data)
        if m.get(b'version') not in (1, 2):
            raise ValueError('Invalid manifest version')
        # archive names are stored as utf-8 bytes on disk
        manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
        manifest.timestamp = m.get(b'timestamp')
        if manifest.timestamp:
            manifest.timestamp = manifest.timestamp.decode('ascii')
        manifest.config = m[b'config']
        # valid item keys are whatever is known in the repo or every key we know
        manifest.item_keys = frozenset(m.get(b'item_keys', [])) | ITEM_KEYS

        if manifest.tam_verified:
            # keep the local security database in sync with the (verified) manifest's TAM flag
            manifest_required = manifest.config.get(b'tam_required', False)
            security_required = tam_required(repository)
            if manifest_required and not security_required:
                logger.debug('Manifest is TAM verified and says TAM is required, updating security database...')
                file = tam_required_file(repository)
                open(file, 'w').close()
            if not manifest_required and security_required:
                logger.debug('Manifest is TAM verified and says TAM is *not* required, updating security database...')
                os.unlink(tam_required_file(repository))
        manifest.check_repository_compatibility(operations)
        return manifest, key

    def check_repository_compatibility(self, operations):
        """Raise if the repo's feature flags demand features we do not support for *operations*."""
        for operation in operations:
            assert isinstance(operation, self.Operation)
            feature_flags = self.config.get(b'feature_flags', None)
            # older repos have no feature flags at all: everything is compatible
            if feature_flags is None:
                return
            if operation.value.encode() not in feature_flags:
                continue
            requirements = feature_flags[operation.value.encode()]
            if b'mandatory' in requirements:
                unsupported = set(requirements[b'mandatory']) - self.SUPPORTED_REPO_FEATURES
                if unsupported:
                    raise MandatoryFeatureUnsupported([f.decode() for f in unsupported])

    def get_all_mandatory_features(self):
        """Return {operation_name: set(feature_names)} for all mandatory repo features."""
        result = {}
        feature_flags = self.config.get(b'feature_flags', None)
        if feature_flags is None:
            return result

        for operation, requirements in feature_flags.items():
            if b'mandatory' in requirements:
                result[operation.decode()] = set([feature.decode() for feature in requirements[b'mandatory']])
        return result

    def write(self):
        """Serialize, authenticate, encrypt and store the manifest into the repository."""
        if self.key.tam_required:
            self.config[b'tam_required'] = True
        # self.timestamp needs to be strictly monotonically increasing. Clocks often are not set correctly
        if self.timestamp is None:
            self.timestamp = datetime.utcnow().strftime(ISO_FORMAT)
        else:
            prev_ts = parse_timestamp(self.timestamp, tzinfo=None)
            incremented = (prev_ts + timedelta(microseconds=1)).strftime(ISO_FORMAT)
            self.timestamp = max(incremented, datetime.utcnow().strftime(ISO_FORMAT))
        # include checks for limits as enforced by limited unpacker (used by load())
        assert len(self.archives) <= MAX_ARCHIVES
        assert all(len(name) <= 255 for name in self.archives)
        assert len(self.item_keys) <= 100
        m = {
            'version': 1,
            'archives': StableDict((name, StableDict(archive)) for name, archive in self.archives.items()),
            'timestamp': self.timestamp,
            'config': StableDict(self.config),
            'item_keys': tuple(sorted(self.item_keys)),
        }
        self.tam_verified = True
        data = self.key.pack_and_authenticate_metadata(m)
        self.id = self.key.id_hash(data)
        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data, none_compression=True))

    def list_archive_infos(self, sort_by=None, reverse=False):
        # inexpensive Archive.list_archives replacement if we just need .name, .id, .ts
        ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
        archives = []
        for name, values in self.archives.items():
            ts = parse_timestamp(values[b'time'].decode('utf-8'))
            id = values[b'id']
            archives.append(ArchiveInfo(name=name, id=id, ts=ts))
        if sort_by is not None:
            archives = sorted(archives, key=attrgetter(sort_by), reverse=reverse)
        return archives
  288. def interval(s):
  289. """Convert a string representing a valid interval to a number of hours."""
  290. multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
  291. if s.endswith(tuple(multiplier.keys())):
  292. number = s[:-1]
  293. suffix = s[-1]
  294. else:
  295. # range suffixes in ascending multiplier order
  296. ranges = [k for k, v in sorted(multiplier.items(), key=lambda t: t[1])]
  297. raise argparse.ArgumentTypeError(
  298. 'Unexpected interval time unit "%s": expected one of %r' % (s[-1], ranges))
  299. try:
  300. hours = int(number) * multiplier[suffix]
  301. except ValueError:
  302. hours = -1
  303. if hours <= 0:
  304. raise argparse.ArgumentTypeError(
  305. 'Unexpected interval number "%s": expected an integer greater than 0' % number)
  306. return hours
  307. def prune_within(archives, hours):
  308. target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
  309. return [a for a in archives if a.ts > target]
  310. def prune_split(archives, pattern, n, skip=[]):
  311. last = None
  312. keep = []
  313. if n == 0:
  314. return keep
  315. for a in sorted(archives, key=attrgetter('ts'), reverse=True):
  316. period = to_localtime(a.ts).strftime(pattern)
  317. if period != last:
  318. last = period
  319. if a not in skip:
  320. keep.append(a)
  321. if len(keep) == n:
  322. break
  323. return keep
class Statistics:
    """Accumulates original/compressed/deduplicated sizes and file counts
    during archive creation, and renders them for display.
    """

    def __init__(self):
        # osize: original bytes, csize: compressed bytes,
        # usize: unique (deduplicated) compressed bytes, nfiles: file count
        self.osize = self.csize = self.usize = self.nfiles = 0
        self.last_progress = 0  # timestamp when last progress was shown

    def update(self, size, csize, unique):
        """Account one chunk: *size* original / *csize* compressed bytes;
        *unique* marks a chunk first seen now (counts towards dedup size)."""
        self.osize += size
        self.csize += csize
        if unique:
            self.usize += csize

    # NOTE(review): reconstructed leading whitespace of this template —
    # column alignment should be confirmed against rendered output.
    summary = """\
                       Original size      Compressed size    Deduplicated size
{label:15} {stats.osize_fmt:>20s} {stats.csize_fmt:>20s} {stats.usize_fmt:>20s}"""

    def __str__(self):
        return self.summary.format(stats=self, label='This archive:')

    def __repr__(self):
        return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(cls=type(self).__name__, hash=id(self), self=self)

    @property
    def osize_fmt(self):
        # human-readable original size
        return format_file_size(self.osize)

    @property
    def usize_fmt(self):
        # human-readable deduplicated size
        return format_file_size(self.usize)

    @property
    def csize_fmt(self):
        # human-readable compressed size
        return format_file_size(self.csize)

    def show_progress(self, item=None, final=False, stream=None, dt=None):
        """Print a one-line progress display (sizes + current path) to *stream*.

        :param item: current item dict; its b'path' is shown if present
        :param final: True clears the progress line instead of drawing it
        :param stream: output stream, defaults to sys.stderr
        :param dt: minimum seconds between updates; None forces an update
        """
        now = time.monotonic()
        if dt is None or now - self.last_progress > dt:
            self.last_progress = now
            columns, lines = get_terminal_size()
            if not final:
                msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
                path = remove_surrogates(item[b'path']) if item else ''
                space = columns - len(msg)
                if space < 12:
                    # not enough room for a path next to the numbers: drop the numbers
                    msg = ''
                    space = columns - len(msg)
                if space >= 8:
                    # shorten the path from the middle if it does not fit
                    if space < len('...') + len(path):
                        path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
                    msg += "{0:<{space}}".format(path, space=space)
            else:
                # overwrite the progress line with blanks
                msg = ' ' * columns
            print(msg, file=stream or sys.stderr, end="\r", flush=True)
  368. def get_keys_dir():
  369. """Determine where to repository keys and cache"""
  370. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
  371. keys_dir = os.environ.get('BORG_KEYS_DIR', os.path.join(xdg_config, 'borg', 'keys'))
  372. if not os.path.exists(keys_dir):
  373. os.makedirs(keys_dir)
  374. os.chmod(keys_dir, stat.S_IRWXU)
  375. return keys_dir
  376. def get_security_dir(repository_id=None):
  377. """Determine where to store local security information."""
  378. xdg_config = os.environ.get('XDG_CONFIG_HOME', os.path.join(os.path.expanduser('~'), '.config'))
  379. security_dir = os.environ.get('BORG_SECURITY_DIR', os.path.join(xdg_config, 'borg', 'security'))
  380. if repository_id:
  381. security_dir = os.path.join(security_dir, repository_id)
  382. if not os.path.exists(security_dir):
  383. os.makedirs(security_dir)
  384. os.chmod(security_dir, stat.S_IRWXU)
  385. return security_dir
  386. def get_cache_dir():
  387. """Determine where to repository keys and cache"""
  388. xdg_cache = os.environ.get('XDG_CACHE_HOME', os.path.join(os.path.expanduser('~'), '.cache'))
  389. cache_dir = os.environ.get('BORG_CACHE_DIR', os.path.join(xdg_cache, 'borg'))
  390. if not os.path.exists(cache_dir):
  391. os.makedirs(cache_dir)
  392. os.chmod(cache_dir, stat.S_IRWXU)
  393. with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
  394. fd.write(textwrap.dedent("""
  395. Signature: 8a477f597d28d172789f06886806bc55
  396. # This file is a cache directory tag created by Borg.
  397. # For information about cache directory tags, see:
  398. # http://www.brynosaurus.com/cachedir/
  399. """).lstrip())
  400. return cache_dir
  401. def to_localtime(ts):
  402. """Convert datetime object from UTC to local time zone"""
  403. return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
  404. def parse_timestamp(timestamp, tzinfo=timezone.utc):
  405. """Parse a ISO 8601 timestamp string"""
  406. fmt = ISO_FORMAT if '.' in timestamp else ISO_FORMAT_NO_USECS
  407. dt = datetime.strptime(timestamp, fmt)
  408. if tzinfo is not None:
  409. dt = dt.replace(tzinfo=tzinfo)
  410. return dt
  411. def load_excludes(fh):
  412. """Load and parse exclude patterns from file object. Lines empty or starting with '#' after stripping whitespace on
  413. both line ends are ignored.
  414. """
  415. patterns = (line for line in (i.strip() for i in fh) if not line.startswith('#'))
  416. return [parse_pattern(pattern) for pattern in patterns if pattern]
  417. def update_excludes(args):
  418. """Merge exclude patterns from files with those on command line."""
  419. if hasattr(args, 'exclude_files') and args.exclude_files:
  420. if not hasattr(args, 'excludes') or args.excludes is None:
  421. args.excludes = []
  422. for file in args.exclude_files:
  423. args.excludes += load_excludes(file)
  424. file.close()
  425. class PatternMatcher:
  426. def __init__(self, fallback=None):
  427. self._items = []
  428. # Value to return from match function when none of the patterns match.
  429. self.fallback = fallback
  430. def add(self, patterns, value):
  431. """Add list of patterns to internal list. The given value is returned from the match function when one of the
  432. given patterns matches.
  433. """
  434. self._items.extend((i, value) for i in patterns)
  435. def match(self, path):
  436. for (pattern, value) in self._items:
  437. if pattern.match(path):
  438. return value
  439. return self.fallback
  440. def normalized(func):
  441. """ Decorator for the Pattern match methods, returning a wrapper that
  442. normalizes OSX paths to match the normalized pattern on OSX, and
  443. returning the original method on other platforms"""
  444. @wraps(func)
  445. def normalize_wrapper(self, path):
  446. return func(self, unicodedata.normalize("NFD", path))
  447. if sys.platform in ('darwin',):
  448. # HFS+ converts paths to a canonical form, so users shouldn't be
  449. # required to enter an exact match
  450. return normalize_wrapper
  451. else:
  452. # Windows and Unix filesystems allow different forms, so users
  453. # always have to enter an exact match
  454. return func
  455. class PatternBase:
  456. """Shared logic for inclusion/exclusion patterns.
  457. """
  458. PREFIX = NotImplemented
  459. def __init__(self, pattern):
  460. self.pattern_orig = pattern
  461. self.match_count = 0
  462. if sys.platform in ('darwin',):
  463. pattern = unicodedata.normalize("NFD", pattern)
  464. self._prepare(pattern)
  465. @normalized
  466. def match(self, path):
  467. matches = self._match(path)
  468. if matches:
  469. self.match_count += 1
  470. return matches
  471. def __repr__(self):
  472. return '%s(%s)' % (type(self), self.pattern)
  473. def __str__(self):
  474. return self.pattern_orig
  475. def _prepare(self, pattern):
  476. raise NotImplementedError
  477. def _match(self, path):
  478. raise NotImplementedError
  479. # For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path
  480. # or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path
  481. # separator to the end of the path before matching.
  482. class PathPrefixPattern(PatternBase):
  483. """Literal files or directories listed on the command line
  484. for some operations (e.g. extract, but not create).
  485. If a directory is specified, all paths that start with that
  486. path match as well. A trailing slash makes no difference.
  487. """
  488. PREFIX = "pp"
  489. def _prepare(self, pattern):
  490. self.pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep
  491. def _match(self, path):
  492. return (path + os.path.sep).startswith(self.pattern)
  493. class FnmatchPattern(PatternBase):
  494. """Shell glob patterns to exclude. A trailing slash means to
  495. exclude the contents of a directory, but not the directory itself.
  496. """
  497. PREFIX = "fm"
  498. def _prepare(self, pattern):
  499. if pattern.endswith(os.path.sep):
  500. pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
  501. else:
  502. pattern = os.path.normpath(pattern) + os.path.sep + '*'
  503. self.pattern = pattern
  504. # fnmatch and re.match both cache compiled regular expressions.
  505. # Nevertheless, this is about 10 times faster.
  506. self.regex = re.compile(translate(self.pattern))
  507. def _match(self, path):
  508. return (self.regex.match(path + os.path.sep) is not None)
  509. class ShellPattern(PatternBase):
  510. """Shell glob patterns to exclude. A trailing slash means to
  511. exclude the contents of a directory, but not the directory itself.
  512. """
  513. PREFIX = "sh"
  514. def _prepare(self, pattern):
  515. sep = os.path.sep
  516. if pattern.endswith(sep):
  517. pattern = os.path.normpath(pattern).rstrip(sep) + sep + "**" + sep + "*" + sep
  518. else:
  519. pattern = os.path.normpath(pattern) + sep + "**" + sep + "*"
  520. self.pattern = pattern
  521. self.regex = re.compile(shellpattern.translate(self.pattern))
  522. def _match(self, path):
  523. return (self.regex.match(path + os.path.sep) is not None)
  524. class RegexPattern(PatternBase):
  525. """Regular expression to exclude.
  526. """
  527. PREFIX = "re"
  528. def _prepare(self, pattern):
  529. self.pattern = pattern
  530. self.regex = re.compile(pattern)
  531. def _match(self, path):
  532. # Normalize path separators
  533. if os.path.sep != '/':
  534. path = path.replace(os.path.sep, '/')
  535. return (self.regex.search(path) is not None)
  536. _PATTERN_STYLES = set([
  537. FnmatchPattern,
  538. PathPrefixPattern,
  539. RegexPattern,
  540. ShellPattern,
  541. ])
  542. _PATTERN_STYLE_BY_PREFIX = dict((i.PREFIX, i) for i in _PATTERN_STYLES)
  543. def parse_pattern(pattern, fallback=FnmatchPattern):
  544. """Read pattern from string and return an instance of the appropriate implementation class.
  545. """
  546. if len(pattern) > 2 and pattern[2] == ":" and pattern[:2].isalnum():
  547. (style, pattern) = (pattern[:2], pattern[3:])
  548. cls = _PATTERN_STYLE_BY_PREFIX.get(style, None)
  549. if cls is None:
  550. raise ValueError("Unknown pattern style: {}".format(style))
  551. else:
  552. cls = fallback
  553. return cls(pattern)
  554. def timestamp(s):
  555. """Convert a --timestamp=s argument to a datetime object"""
  556. try:
  557. # is it pointing to a file / directory?
  558. ts = safe_s(os.stat(s).st_mtime)
  559. return datetime.utcfromtimestamp(ts)
  560. except OSError:
  561. # didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
  562. for format in ('%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%M:%S+00:00',
  563. '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S',
  564. '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M',
  565. '%Y-%m-%d', '%Y-%j',
  566. ):
  567. try:
  568. return datetime.strptime(s, format)
  569. except ValueError:
  570. continue
  571. raise ValueError
  572. def ChunkerParams(s):
  573. chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
  574. if int(chunk_max) > 23:
  575. raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
  576. return int(chunk_min), int(chunk_max), int(chunk_mask), int(window_size)
  577. def CompressionSpec(s):
  578. values = s.split(',')
  579. count = len(values)
  580. if count < 1:
  581. raise ValueError
  582. # --compression algo[,level]
  583. name = values[0]
  584. if name in ('none', 'lz4', ):
  585. return dict(name=name)
  586. if name in ('zlib', 'lzma', ):
  587. if count < 2:
  588. level = 6 # default compression level in py stdlib
  589. elif count == 2:
  590. level = int(values[1])
  591. if not 0 <= level <= 9:
  592. raise ValueError
  593. else:
  594. raise ValueError
  595. return dict(name=name, level=level)
  596. raise ValueError
def PrefixSpec(s):
    """argparse type for --prefix: expand placeholders (e.g. {hostname}, {now}) in *s*."""
    return replace_placeholders(s)
  599. def dir_is_cachedir(path):
  600. """Determines whether the specified path is a cache directory (and
  601. therefore should potentially be excluded from the backup) according to
  602. the CACHEDIR.TAG protocol
  603. (http://www.brynosaurus.com/cachedir/spec.html).
  604. """
  605. tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
  606. tag_path = os.path.join(path, 'CACHEDIR.TAG')
  607. try:
  608. if os.path.exists(tag_path):
  609. with open(tag_path, 'rb') as tag_file:
  610. tag_data = tag_file.read(len(tag_contents))
  611. if tag_data == tag_contents:
  612. return True
  613. except OSError:
  614. pass
  615. return False
  616. def dir_is_tagged(path, exclude_caches, exclude_if_present):
  617. """Determines whether the specified path is excluded by being a cache
  618. directory or containing user-specified tag files. Returns a list of the
  619. paths of the tag files (either CACHEDIR.TAG or the matching
  620. user-specified files).
  621. """
  622. tag_paths = []
  623. if exclude_caches and dir_is_cachedir(path):
  624. tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
  625. if exclude_if_present is not None:
  626. for tag in exclude_if_present:
  627. tag_path = os.path.join(path, tag)
  628. if os.path.isfile(tag_path):
  629. tag_paths.append(tag_path)
  630. return tag_paths
  631. def format_line(format, data):
  632. try:
  633. return format.format(**data)
  634. except Exception as e:
  635. raise PlaceholderError(format, data, e.__class__.__name__, str(e))
def replace_placeholders(text):
    """Replace placeholders in text with their values."""
    current_time = datetime.now()
    # NOTE(review): 'now'/'utcnow' invoke the datetime classmethods again via the
    # instance, so they are taken a few microseconds after current_time - confirm
    # whether reusing current_time itself was intended.
    data = {
        'pid': os.getpid(),
        'fqdn': socket.getfqdn(),
        'hostname': socket.gethostname(),
        'now': current_time.now(),        # local time, naive datetime
        'utcnow': current_time.utcnow(),  # UTC, naive datetime
        'user': uid2user(os.getuid(), os.getuid()),  # falls back to the numeric uid if no name found
        'borgversion': borg_version,
        'borgmajor': '%d' % borg_version_tuple[:1],      # e.g. "1"
        'borgminor': '%d.%d' % borg_version_tuple[:2],   # e.g. "1.0"
        'borgpatch': '%d.%d.%d' % borg_version_tuple[:3],  # e.g. "1.0.9"
    }
    return format_line(text, data)
# Not too rarely, we get crappy timestamps from the fs, that overflow some computations.
# As they are crap anyway (valid filesystem timestamps always refer to the past up to
# the present, but never to the future), nothing is lost if we just clamp them to the
# maximum value we can support.
# As long as people are using borg on 32bit platforms to access borg archives, we must
# keep this value True. But we can expect that we can stop supporting 32bit platforms
# well before coming close to the year 2038, so this will never be a practical problem.
SUPPORT_32BIT_PLATFORMS = True  # set this to False before y2038.

if SUPPORT_32BIT_PLATFORMS:
    # second timestamps will fit into a signed int32 (platform time_t limit).
    # nanosecond timestamps thus will naturally fit into a signed int64.
    # subtract last 48h to avoid any issues that could be caused by tz calculations.
    # this is in the year 2038, so it is also less than y9999 (which is a datetime internal limit).
    # msgpack can pack up to uint64.
    MAX_S = 2**31-1 - 48*3600
    MAX_NS = MAX_S * 1000000000
else:
    # nanosecond timestamps will fit into a signed int64.
    # subtract last 48h to avoid any issues that could be caused by tz calculations.
    # this is in the year 2262, so it is also less than y9999 (which is a datetime internal limit).
    # round down to 1e9 multiple, so MAX_NS corresponds precisely to a integer MAX_S.
    # msgpack can pack up to uint64.
    MAX_NS = (2**63-1 - 48*3600*1000000000) // 1000000000 * 1000000000
    MAX_S = MAX_NS // 1000000000
  676. def safe_s(ts):
  677. if 0 <= ts <= MAX_S:
  678. return ts
  679. elif ts < 0:
  680. return 0
  681. else:
  682. return MAX_S
  683. def safe_ns(ts):
  684. if 0 <= ts <= MAX_NS:
  685. return ts
  686. elif ts < 0:
  687. return 0
  688. else:
  689. return MAX_NS
  690. def safe_timestamp(item_timestamp_ns):
  691. t_ns = safe_ns(bigint_to_int(item_timestamp_ns))
  692. return datetime.fromtimestamp(t_ns / 1e9)
  693. def format_time(t):
  694. """use ISO-8601 date and time format
  695. """
  696. return t.strftime('%a, %Y-%m-%d %H:%M:%S')
  697. def format_timedelta(td):
  698. """Format timedelta in a human friendly format
  699. """
  700. # Since td.total_seconds() requires python 2.7
  701. ts = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
  702. s = ts % 60
  703. m = int(ts / 60) % 60
  704. h = int(ts / 3600) % 24
  705. txt = '%.2f seconds' % s
  706. if m:
  707. txt = '%d minutes %s' % (m, txt)
  708. if h:
  709. txt = '%d hours %s' % (h, txt)
  710. if td.days:
  711. txt = '%d days %s' % (td.days, txt)
  712. return txt
def format_file_size(v, precision=2):
    """Format file size into a human friendly format
    """
    # decimal (power-of-1000) units with a space separator, e.g. "1.50 MB"
    return sizeof_fmt_decimal(v, suffix='B', sep=' ', precision=precision)
  717. def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2):
  718. for unit in units[:-1]:
  719. if abs(round(num, precision)) < power:
  720. if isinstance(num, int):
  721. return "{}{}{}{}".format(num, sep, unit, suffix)
  722. else:
  723. return "{:3.{}f}{}{}{}".format(num, precision, sep, unit, suffix)
  724. num /= float(power)
  725. return "{:.{}f}{}{}{}".format(num, precision, sep, units[-1], suffix)
def sizeof_fmt_iec(num, suffix='B', sep='', precision=2):
    """Format *num* with binary (IEC, power-of-1024) unit prefixes: Ki, Mi, Gi, ..."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'], power=1024)
def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2):
    """Format *num* with decimal (SI, power-of-1000) unit prefixes: k, M, G, ..."""
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision, units=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'], power=1000)
def format_archive(archive):
    """One-line archive summary: left-aligned name plus its local-time timestamp."""
    return '%-36s %s' % (archive.name, format_time(to_localtime(archive.ts)))
  732. def memoize(function):
  733. cache = {}
  734. def decorated_function(*args):
  735. try:
  736. return cache[args]
  737. except KeyError:
  738. val = function(*args)
  739. cache[args] = val
  740. return val
  741. return decorated_function
  742. class Buffer:
  743. """
  744. Provides a managed, resizable buffer.
  745. """
  746. class MemoryLimitExceeded(Error, OSError):
  747. """Requested buffer size {} is above the limit of {}."""
  748. def __init__(self, allocator, size=4096, limit=None):
  749. """
  750. Initialize the buffer: use allocator(size) call to allocate a buffer.
  751. Optionally, set the upper <limit> for the buffer size.
  752. """
  753. assert callable(allocator), 'must give alloc(size) function as first param'
  754. assert limit is None or size <= limit, 'initial size must be <= limit'
  755. self.allocator = allocator
  756. self.limit = limit
  757. self.resize(size, init=True)
  758. def __len__(self):
  759. return len(self.buffer)
  760. def resize(self, size, init=False):
  761. """
  762. resize the buffer - to avoid frequent reallocation, we usually always grow (if needed).
  763. giving init=True it is possible to first-time initialize or shrink the buffer.
  764. if a buffer size beyond the limit is requested, raise Buffer.MemoryLimitExceeded (OSError).
  765. """
  766. size = int(size)
  767. if self.limit is not None and size > self.limit:
  768. raise Buffer.MemoryLimitExceeded(size, self.limit)
  769. if init or len(self) < size:
  770. self.buffer = self.allocator(size)
  771. def get(self, size=None, init=False):
  772. """
  773. return a buffer of at least the requested size (None: any current size).
  774. init=True can be given to trigger shrinking of the buffer to the given size.
  775. """
  776. if size is not None:
  777. self.resize(size, init)
  778. return self.buffer
  779. @memoize
  780. def uid2user(uid, default=None):
  781. try:
  782. return pwd.getpwuid(uid).pw_name
  783. except KeyError:
  784. return default
  785. @memoize
  786. def user2uid(user, default=None):
  787. try:
  788. return user and pwd.getpwnam(user).pw_uid
  789. except KeyError:
  790. return default
  791. @memoize
  792. def gid2group(gid, default=None):
  793. try:
  794. return grp.getgrgid(gid).gr_name
  795. except KeyError:
  796. return default
  797. @memoize
  798. def group2gid(group, default=None):
  799. try:
  800. return group and grp.getgrnam(group).gr_gid
  801. except KeyError:
  802. return default
  803. def posix_acl_use_stored_uid_gid(acl):
  804. """Replace the user/group field with the stored uid/gid
  805. """
  806. entries = []
  807. for entry in safe_decode(acl).split('\n'):
  808. if entry:
  809. fields = entry.split(':')
  810. if len(fields) == 4:
  811. entries.append(':'.join([fields[0], fields[3], fields[2]]))
  812. else:
  813. entries.append(entry)
  814. return safe_encode('\n'.join(entries))
  815. def safe_decode(s, coding='utf-8', errors='surrogateescape'):
  816. """decode bytes to str, with round-tripping "invalid" bytes"""
  817. return s.decode(coding, errors)
  818. def safe_encode(s, coding='utf-8', errors='surrogateescape'):
  819. """encode str to bytes, with round-tripping "invalid" bytes"""
  820. return s.encode(coding, errors)
def bin_to_hex(binary):
    """Return *binary* (bytes-like) as a lowercase ASCII hex str."""
    return hexlify(binary).decode('ascii')
  823. def parse_stringified_list(s):
  824. l = re.split(" *, *", s)
  825. return [item for item in l if item != '']
class Location:
    """Object representing a repository / archive location
    """
    # parse results; all remain None when not present in the parsed text.
    proto = user = _host = port = path = archive = None

    # user must not contain "@", ":" or "/".
    # Quoting adduser error message:
    # "To avoid problems, the username should consist only of letters, digits,
    # underscores, periods, at signs and dashes, and not start with a dash
    # (as defined by IEEE Std 1003.1-2001)."
    # We use "@" as separator between username and hostname, so we must
    # disallow it within the pure username part.
    optional_user_re = r"""
        (?:(?P<user>[^@:/]+)@)?
    """

    # path must not contain :: (it ends at :: or string end), but may contain single colons.
    # to avoid ambiguities with other regexes, it must also not start with ":" nor with "//" nor with "ssh://".
    scp_path_re = r"""
        (?!(:|//|ssh://))                   # not starting with ":" or // or ssh://
        (?P<path>([^:]|(:(?!:)))+)          # any chars, but no "::"
    """

    # file_path must not contain :: (it ends at :: or string end), but may contain single colons.
    # it must start with a / and that slash is part of the path.
    file_path_re = r"""
        (?P<path>(([^/]*)/([^:]|(:(?!:)))+))  # start opt. servername, then /, then any chars, but no "::"
    """

    # abs_path must not contain :: (it ends at :: or string end), but may contain single colons.
    # it must start with a / and that slash is part of the path.
    abs_path_re = r"""
        (?P<path>(/([^:]|(:(?!:)))+))       # start with /, then any chars, but no "::"
    """

    # optional ::archive_name at the end, archive name must not contain "/".
    # borg mount's FUSE filesystem creates one level of directories from
    # the archive names and of course "/" is not valid in a directory name.
    optional_archive_re = r"""
        (?:
            ::                              # "::" as separator
            (?P<archive>[^/]+)              # archive name must not contain "/"
        )?$"""                              # must match until the end

    # regexes for misc. kinds of supported location specifiers:
    ssh_re = re.compile(r"""
        (?P<proto>ssh)://                                       # ssh://
        """ + optional_user_re + r"""                           # user@  (optional)
        (?P<host>([^:/]+|\[[0-9a-fA-F:.]+\]))(?::(?P<port>\d+))?  # host or host:port or [ipv6] or [ipv6]:port
        """ + abs_path_re + optional_archive_re, re.VERBOSE)    # path or path::archive

    file_re = re.compile(r"""
        (?P<proto>file)://                                      # file://
        """ + file_path_re + optional_archive_re, re.VERBOSE)   # servername/path, path or path::archive

    # note: scp_re is also use for local pathes
    scp_re = re.compile(r"""
        (
            """ + optional_user_re + r"""                       # user@  (optional)
            (?P<host>([^:/]+|\[[0-9a-fA-F:.]+\])):              # host: (don't match / or [ipv6] in host to disambiguate from file:)
        )?                                                      # user@host: part is optional
        """ + scp_path_re + optional_archive_re, re.VERBOSE)    # path with optional archive

    # get the repo from BORG_REPO env and the optional archive from param.
    # if the syntax requires giving REPOSITORY (see "borg mount"),
    # use "::" to let it use the env var.
    # if REPOSITORY argument is optional, it'll automatically use the env.
    env_re = re.compile(r"""                                    # the repo part is fetched from BORG_REPO
        (?:::$)                                                 # just "::" is ok (when a pos. arg is required, no archive)
        |                                                       # or
        """ + optional_archive_re, re.VERBOSE)                  # archive name (optional, may be empty)

    def __init__(self, text=''):
        """Parse *text* into this Location; raise ValueError if it is not a valid location."""
        self.orig = text
        if not self.parse(self.orig):
            raise ValueError('Location: parse failed: %s' % self.orig)

    def parse(self, text):
        """Parse *text* (after placeholder expansion); fall back to the BORG_REPO
        env var for "::archive"-style input. Returns True on success."""
        text = replace_placeholders(text)
        valid = self._parse(text)
        if valid:
            return True
        m = self.env_re.match(text)
        if not m:
            return False
        repo = os.environ.get('BORG_REPO')
        if repo is None:
            return False
        valid = self._parse(repo)
        if not valid:
            return False
        self.archive = m.group('archive')
        return True

    def _parse(self, text):
        """Try the ssh://, file:// and scp-style regexes in turn; fill in the
        instance attributes and return True on the first match."""
        def normpath_special(p):
            # avoid that normpath strips away our relative path hack and even makes p absolute
            relative = p.startswith('/./')
            p = os.path.normpath(p)
            return ('/.' + p) if relative else p

        m = self.ssh_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.user = m.group('user')
            self._host = m.group('host')
            self.port = m.group('port') and int(m.group('port')) or None
            self.path = normpath_special(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.file_re.match(text)
        if m:
            self.proto = m.group('proto')
            self.path = normpath_special(m.group('path'))
            self.archive = m.group('archive')
            return True
        m = self.scp_re.match(text)
        if m:
            self.user = m.group('user')
            self._host = m.group('host')
            self.path = normpath_special(m.group('path'))
            self.archive = m.group('archive')
            # a host part present means remote access via ssh, otherwise a local path
            self.proto = self._host and 'ssh' or 'file'
            return True
        return False

    def __str__(self):
        """Debug-style rendering of all parsed fields."""
        items = [
            'proto=%r' % self.proto,
            'user=%r' % self.user,
            'host=%r' % self.host,
            'port=%r' % self.port,
            'path=%r' % self.path,
            'archive=%r' % self.archive,
        ]
        return ', '.join(items)

    def to_key_filename(self):
        """Derive a filesystem-safe key file name (under get_keys_dir()) from this location."""
        name = re.sub('[^\w]', '_', self.path).strip('_')
        if self.proto != 'file':
            name = re.sub('[^\w]', '_', self.host) + '__' + name
        if len(name) > 100:
            # Limit file names to some reasonable length. Most file systems
            # limit them to 255 [unit of choice]; due to variations in unicode
            # handling we truncate to 100 *characters*.
            name = name[:100]
        return os.path.join(get_keys_dir(), name)

    def __repr__(self):
        return "Location(%s)" % self

    @property
    def host(self):
        # strip square brackets used for IPv6 addrs
        if self._host is not None:
            return self._host.lstrip('[').rstrip(']')

    def canonical_path(self):
        """Return the location in canonical ssh://... form (or the plain path for proto 'file')."""
        if self.proto == 'file':
            return self.path
        else:
            if self.path and self.path.startswith('~'):
                path = '/' + self.path  # /~/x = path x relative to home dir
            elif self.path and not self.path.startswith('/'):
                path = '/./' + self.path  # /./x = path x relative to cwd
            else:
                path = self.path
            return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
                                           self._host,  # needed for ipv6 addrs
                                           ':{}'.format(self.port) if self.port else '',
                                           path)
  979. def location_validator(archive=None):
  980. def validator(text):
  981. try:
  982. loc = Location(text)
  983. except ValueError:
  984. raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
  985. if archive is True and not loc.archive:
  986. raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
  987. elif archive is False and loc.archive:
  988. raise argparse.ArgumentTypeError('"%s" No archive can be specified' % text)
  989. return loc
  990. return validator
  991. def archivename_validator():
  992. def validator(text):
  993. if '/' in text or '::' in text or not text:
  994. raise argparse.ArgumentTypeError('Invalid repository name: "%s"' % text)
  995. return text
  996. return validator
  997. def decode_dict(d, keys, encoding='utf-8', errors='surrogateescape'):
  998. for key in keys:
  999. if isinstance(d.get(key), bytes):
  1000. d[key] = d[key].decode(encoding, errors)
  1001. return d
  1002. def remove_surrogates(s, errors='replace'):
  1003. """Replace surrogates generated by fsdecode with '?'
  1004. """
  1005. return s.encode('utf-8', errors).decode('utf-8')
  1006. _safe_re = re.compile(r'^((\.\.)?/+)+')
  1007. def make_path_safe(path):
  1008. """Make path safe by making it relative and local
  1009. """
  1010. return _safe_re.sub('', path) or '.'
def daemonize():
    """Detach process from controlling terminal and run in background

    Returns: old and new get_process_id tuples
    """
    from .locking import get_id as get_process_id
    old_id = get_process_id()
    # classic double-fork: the first fork + setsid() detaches from the
    # controlling terminal, the second fork prevents reacquiring one.
    pid = os.fork()
    if pid:
        os._exit(0)
    os.setsid()
    pid = os.fork()
    if pid:
        os._exit(0)
    # do not keep any directory busy
    os.chdir('/')
    # redirect stdin/stdout/stderr (fds 0, 1, 2) to /dev/null
    os.close(0)
    os.close(1)
    os.close(2)
    fd = os.open('/dev/null', os.O_RDWR)
    os.dup2(fd, 0)
    os.dup2(fd, 1)
    os.dup2(fd, 2)
    new_id = get_process_id()
    return old_id, new_id
  1034. class StableDict(dict):
  1035. """A dict subclass with stable items() ordering"""
  1036. def items(self):
  1037. return sorted(super().items())
  1038. def bigint_to_int(mtime):
  1039. """Convert bytearray to int
  1040. """
  1041. if isinstance(mtime, bytes):
  1042. return int.from_bytes(mtime, 'little', signed=True)
  1043. return mtime
  1044. def int_to_bigint(value):
  1045. """Convert integers larger than 64 bits to bytearray
  1046. Smaller integers are left alone
  1047. """
  1048. if value.bit_length() > 63:
  1049. return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
  1050. return value
def is_slow_msgpack():
    """True if the pure-Python msgpack fallback is in use instead of the C extension."""
    return msgpack.Packer is msgpack.fallback.Packer
# answer sets for yes(); note DEFAULTISH includes the empty string (just pressing enter).
FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )


def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
        retry_msg=None, invalid_msg=None, env_msg='{} (from {})',
        falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,
        default=False, retry=True, env_var_override=None, ofile=None, input=input):
    """Output <msg> (usually a question) and let user input an answer.
    Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
    If it didn't qualify and retry is False (no retries wanted), return the default [which
    defaults to False]. If retry is True let user retry answering until answer is qualified.

    If env_var_override is given and this var is present in the environment, do not ask
    the user, but just use the env var contents as answer as if it was typed in.
    Otherwise read input from stdin and proceed as normal.
    If EOF is received instead an input or an invalid input without retry possibility,
    return default.

    :param msg: introducing message to output on ofile, no \n is added [None]
    :param retry_msg: retry message to output on ofile, no \n is added [None]
    :param false_msg: message to output before returning False [None]
    :param true_msg: message to output before returning True [None]
    :param default_msg: message to output before returning a <default> [None]
    :param invalid_msg: message to output after a invalid answer was given [None]
    :param env_msg: message to output when using input from env_var_override ['{} (from {})'],
           needs to have 2 placeholders for answer and env var name
    :param falsish: sequence of answers qualifying as False
    :param truish: sequence of answers qualifying as True
    :param defaultish: sequence of answers qualifying as <default>
    :param default: default return value (defaultish answer was given or no-answer condition) [False]
    :param retry: if True and input is incorrect, retry. Otherwise return default. [True]
    :param env_var_override: environment variable name [None]
    :param ofile: output stream [sys.stderr]
    :param input: input function [input from builtins]
    :return: boolean answer value, True or False
    """
    # note: we do not assign sys.stderr as default above, so it is
    # really evaluated NOW, not at function definition time.
    # (the 'input' parameter also allows tests to inject canned answers.)
    if ofile is None:
        ofile = sys.stderr
    if default not in (True, False):
        raise ValueError("invalid default value, must be True or False")
    if msg:
        print(msg, file=ofile, end='', flush=True)
    while True:
        answer = None
        if env_var_override:
            answer = os.environ.get(env_var_override)
            if answer is not None and env_msg:
                print(env_msg.format(answer, env_var_override), file=ofile)
        if answer is None:
            try:
                answer = input()
            except EOFError:
                # avoid defaultish[0], defaultish could be empty
                answer = truish[0] if default else falsish[0]
        if answer in defaultish:
            if default_msg:
                print(default_msg, file=ofile)
            return default
        if answer in truish:
            if true_msg:
                print(true_msg, file=ofile)
            return True
        if answer in falsish:
            if false_msg:
                print(false_msg, file=ofile)
            return False
        # if we get here, the answer was invalid
        if invalid_msg:
            print(invalid_msg, file=ofile)
        if not retry:
            return default
        if retry_msg:
            print(retry_msg, file=ofile, end='', flush=True)
        # in case we used an environment variable and it gave an invalid answer, do not use it again:
        env_var_override = None
  1128. class ProgressIndicatorPercent:
  1129. def __init__(self, total, step=5, start=0, same_line=False, msg="%3.0f%%", file=None):
  1130. """
  1131. Percentage-based progress indicator
  1132. :param total: total amount of items
  1133. :param step: step size in percent
  1134. :param start: at which percent value to start
  1135. :param same_line: if True, emit output always on same line
  1136. :param msg: output message, must contain one %f placeholder for the percentage
  1137. :param file: output file, default: sys.stderr
  1138. """
  1139. self.counter = 0 # 0 .. (total-1)
  1140. self.total = total
  1141. self.trigger_at = start # output next percentage value when reaching (at least) this
  1142. self.step = step
  1143. if file is None:
  1144. file = sys.stderr
  1145. self.file = file
  1146. self.msg = msg
  1147. self.same_line = same_line
  1148. def progress(self, current=None):
  1149. if current is not None:
  1150. self.counter = current
  1151. pct = self.counter * 100 / self.total
  1152. self.counter += 1
  1153. if pct >= self.trigger_at:
  1154. self.trigger_at += self.step
  1155. return pct
  1156. def show(self, current=None):
  1157. pct = self.progress(current)
  1158. if pct is not None:
  1159. return self.output(pct)
  1160. def output(self, percent):
  1161. print(self.msg % percent, file=self.file, end='\r' if self.same_line else '\n', flush=True)
  1162. def finish(self):
  1163. if self.same_line:
  1164. print(" " * len(self.msg % 100.0), file=self.file, end='\r')
  1165. class ProgressIndicatorEndless:
  1166. def __init__(self, step=10, file=None):
  1167. """
  1168. Progress indicator (long row of dots)
  1169. :param step: every Nth call, call the func
  1170. :param file: output file, default: sys.stderr
  1171. """
  1172. self.counter = 0 # call counter
  1173. self.triggered = 0 # increases 1 per trigger event
  1174. self.step = step # trigger every <step> calls
  1175. if file is None:
  1176. file = sys.stderr
  1177. self.file = file
  1178. def progress(self):
  1179. self.counter += 1
  1180. trigger = self.counter % self.step == 0
  1181. if trigger:
  1182. self.triggered += 1
  1183. return trigger
  1184. def show(self):
  1185. trigger = self.progress()
  1186. if trigger:
  1187. return self.output(self.triggered)
  1188. def output(self, triggered):
  1189. print('.', end='', file=self.file, flush=True)
  1190. def finish(self):
  1191. print(file=self.file)
  1192. def sysinfo():
  1193. python_implementation = platform.python_implementation()
  1194. python_version = platform.python_version()
  1195. # platform.uname() does a shell call internally to get processor info,
  1196. # creating #3732 issue, so rather use os.uname().
  1197. try:
  1198. uname = os.uname()
  1199. except AttributeError:
  1200. uname = None
  1201. if sys.platform.startswith('linux'):
  1202. try:
  1203. linux_distribution = platform.linux_distribution()
  1204. except:
  1205. # platform.linux_distribution() is deprecated since py 3.5 and removed in 3.7.
  1206. linux_distribution = ('Unknown Linux', '', '')
  1207. else:
  1208. linux_distribution = None
  1209. info = []
  1210. if uname is not None:
  1211. info.append('Platform: %s' % (' '.join(uname), ))
  1212. if linux_distribution is not None:
  1213. info.append('Linux: %s %s %s' % linux_distribution)
  1214. info.append('Borg: %s Python: %s %s' % (borg_version, python_implementation, python_version))
  1215. info.append('PID: %d CWD: %s' % (os.getpid(), os.getcwd()))
  1216. info.append('sys.argv: %r' % sys.argv)
  1217. info.append('SSH_ORIGINAL_COMMAND: %r' % os.environ.get('SSH_ORIGINAL_COMMAND'))
  1218. info.append('')
  1219. return '\n'.join(info)
  1220. def log_multi(*msgs, level=logging.INFO):
  1221. """
  1222. log multiple lines of text, each line by a separate logging call for cosmetic reasons
  1223. each positional argument may be a single or multiple lines (separated by newlines) of text.
  1224. """
  1225. lines = []
  1226. for msg in msgs:
  1227. lines.extend(msg.splitlines())
  1228. for line in lines:
  1229. logger.log(level, line)
  1230. class ErrorIgnoringTextIOWrapper(io.TextIOWrapper):
  1231. def read(self, n):
  1232. if not self.closed:
  1233. try:
  1234. return super().read(n)
  1235. except BrokenPipeError:
  1236. try:
  1237. super().close()
  1238. except OSError:
  1239. pass
  1240. return ''
  1241. def write(self, s):
  1242. if not self.closed:
  1243. try:
  1244. return super().write(s)
  1245. except BrokenPipeError:
  1246. try:
  1247. super().close()
  1248. except OSError:
  1249. pass
  1250. return len(s)
class SignalException(BaseException):
    """base class for all signal-based exceptions"""
    # derives from BaseException (not Exception) so generic "except Exception"
    # handlers do not accidentally swallow signal-triggered aborts.


class SigHup(SignalException):
    """raised on SIGHUP signal"""


class SigTerm(SignalException):
    """raised on SIGTERM signal"""
  1257. @contextlib.contextmanager
  1258. def signal_handler(sig, handler):
  1259. """
  1260. when entering context, set up signal handler <handler> for signal <sig>.
  1261. when leaving context, restore original signal handler.
  1262. <sig> can bei either a str when giving a signal.SIGXXX attribute name (it
  1263. won't crash if the attribute name does not exist as some names are platform
  1264. specific) or a int, when giving a signal number.
  1265. <handler> is any handler value as accepted by the signal.signal(sig, handler).
  1266. """
  1267. if isinstance(sig, str):
  1268. sig = getattr(signal, sig, None)
  1269. if sig is not None:
  1270. orig_handler = signal.signal(sig, handler)
  1271. try:
  1272. yield
  1273. finally:
  1274. if sig is not None:
  1275. signal.signal(sig, orig_handler)
  1276. def raising_signal_handler(exc_cls):
  1277. def handler(sig_no, frame):
  1278. # setting SIG_IGN avoids that an incoming second signal of this
  1279. # kind would raise a 2nd exception while we still process the
  1280. # exception handler for exc_cls for the 1st signal.
  1281. signal.signal(sig_no, signal.SIG_IGN)
  1282. raise exc_cls
  1283. return handler
  1284. def prepare_subprocess_env(system, env=None):
  1285. """
  1286. Prepare the environment for a subprocess we are going to create.
  1287. :param system: True for preparing to invoke system-installed binaries,
  1288. False for stuff inside the pyinstaller environment (like borg, python).
  1289. :param env: optionally give a environment dict here. if not given, default to os.environ.
  1290. :return: a modified copy of the environment
  1291. """
  1292. env = dict(env if env is not None else os.environ)
  1293. if system:
  1294. # a pyinstaller binary's bootloader modifies LD_LIBRARY_PATH=/tmp/_MEIXXXXXX,
  1295. # but we do not want that system binaries (like ssh or other) pick up
  1296. # (non-matching) libraries from there.
  1297. # thus we install the original LDLP, before pyinstaller has modified it:
  1298. lp_key = 'LD_LIBRARY_PATH'
  1299. lp_orig = env.get(lp_key + '_ORIG') # pyinstaller >= 20160820 / v3.2.1 has this
  1300. if lp_orig is not None:
  1301. env[lp_key] = lp_orig
  1302. else:
  1303. # We get here in 2 cases:
  1304. # 1. when not running a pyinstaller-made binary.
  1305. # in this case, we must not kill LDLP.
  1306. # 2. when running a pyinstaller-made binary and there was no LDLP
  1307. # in the original env (in this case, the pyinstaller bootloader
  1308. # does *not* put ..._ORIG into the env either).
  1309. # in this case, we must kill LDLP.
  1310. # The directory used by pyinstaller is created by mkdtemp("_MEIXXXXXX"),
  1311. # we can use that to differentiate between the cases.
  1312. lp = env.get(lp_key)
  1313. if lp is not None and re.search(r'/_MEI......', lp):
  1314. env.pop(lp_key)
  1315. # security: do not give secrets to subprocess
  1316. env.pop('BORG_PASSPHRASE', None)
  1317. # for information, give borg version to the subprocess
  1318. env['BORG_VERSION'] = borg_version
  1319. return env