borgmatic.py 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883
  1. import collections
  2. import copy
  3. import json
  4. import logging
  5. import os
  6. import sys
  7. import time
  8. from queue import Queue
  9. from subprocess import CalledProcessError
  10. import colorama
  11. import pkg_resources
  12. from borgmatic.borg import borg as borg_borg
  13. from borgmatic.borg import check as borg_check
  14. from borgmatic.borg import compact as borg_compact
  15. from borgmatic.borg import create as borg_create
  16. from borgmatic.borg import environment as borg_environment
  17. from borgmatic.borg import export_tar as borg_export_tar
  18. from borgmatic.borg import extract as borg_extract
  19. from borgmatic.borg import feature as borg_feature
  20. from borgmatic.borg import info as borg_info
  21. from borgmatic.borg import init as borg_init
  22. from borgmatic.borg import list as borg_list
  23. from borgmatic.borg import mount as borg_mount
  24. from borgmatic.borg import prune as borg_prune
  25. from borgmatic.borg import umount as borg_umount
  26. from borgmatic.borg import version as borg_version
  27. from borgmatic.commands.arguments import parse_arguments
  28. from borgmatic.config import checks, collect, convert, validate
  29. from borgmatic.hooks import command, dispatch, dump, monitor
  30. from borgmatic.logger import configure_logging, should_do_markup
  31. from borgmatic.signals import configure_signals
  32. from borgmatic.verbosity import verbosity_to_log_level
  33. logger = logging.getLogger(__name__)
  34. LEGACY_CONFIG_PATH = '/etc/borgmatic/config'
def run_configuration(config_filename, config, arguments):
    '''
    Given a config filename, the corresponding parsed config dict, and command-line arguments as a
    dict from subparser name to a namespace of parsed arguments, execute the defined prune, compact,
    create, check, and/or other actions.

    Yield a combination of:

      * JSON output strings from successfully executing any actions that produce JSON
      * logging.LogRecord instances containing errors from any actions or backup hooks that fail
    '''
    # Pull out each top-level configuration section, defaulting to empty dicts so that
    # .get() lookups below are always safe.
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )
    global_arguments = arguments['global']

    local_path = location.get('local_path', 'borg')
    remote_path = location.get('remote_path')
    retries = storage.get('retries', 0)
    retry_wait = storage.get('retry_wait', 0)
    borg_environment.initialize(storage)
    encountered_error = None
    error_repository = ''
    # Only these actions trigger monitoring hooks and the on_error hook below.
    using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
    monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)

    # Determine the local Borg version up front; every action needs it, so bail out of this
    # configuration entirely if it can't be determined.
    try:
        local_borg_version = borg_version.local_borg_version(local_path)
    except (OSError, CalledProcessError, ValueError) as error:
        yield from make_error_log_records(
            '{}: Error getting local Borg version'.format(config_filename), error
        )
        return

    # Run pre-action hooks and notify any configured monitors that a run has started.
    try:
        if using_primary_action:
            dispatch.call_hooks(
                'initialize_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        if 'prune' in arguments:
            command.execute_hook(
                hooks.get('before_prune'),
                hooks.get('umask'),
                config_filename,
                'pre-prune',
                global_arguments.dry_run,
            )
        if 'compact' in arguments:
            command.execute_hook(
                hooks.get('before_compact'),
                hooks.get('umask'),
                config_filename,
                'pre-compact',
                global_arguments.dry_run,
            )
        if 'create' in arguments:
            command.execute_hook(
                hooks.get('before_backup'),
                hooks.get('umask'),
                config_filename,
                'pre-backup',
                global_arguments.dry_run,
            )
        if 'check' in arguments:
            command.execute_hook(
                hooks.get('before_check'),
                hooks.get('umask'),
                config_filename,
                'pre-check',
                global_arguments.dry_run,
            )
        if 'extract' in arguments:
            command.execute_hook(
                hooks.get('before_extract'),
                hooks.get('umask'),
                config_filename,
                'pre-extract',
                global_arguments.dry_run,
            )
        if using_primary_action:
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.START,
                monitoring_log_level,
                global_arguments.dry_run,
            )
    except (OSError, CalledProcessError) as error:
        # A "soft failure" exit status from a hook means: skip this configuration silently.
        if command.considered_soft_failure(config_filename, error):
            return

        encountered_error = error
        yield from make_error_log_records(
            '{}: Error running pre hook'.format(config_filename), error
        )

    # Run the actual actions on each repository, re-queueing a repository (up to the
    # configured number of retries) when its actions fail.
    if not encountered_error:
        repo_queue = Queue()
        for repo in location['repositories']:
            repo_queue.put((repo, 0),)

        while not repo_queue.empty():
            repository_path, retry_num = repo_queue.get()
            # Back off linearly: wait retry_num * retry_wait seconds before retrying.
            timeout = retry_num * retry_wait
            if timeout:
                logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
                time.sleep(timeout)
            try:
                yield from run_actions(
                    arguments=arguments,
                    location=location,
                    storage=storage,
                    retention=retention,
                    consistency=consistency,
                    hooks=hooks,
                    local_path=local_path,
                    remote_path=remote_path,
                    local_borg_version=local_borg_version,
                    repository_path=repository_path,
                )
            except (OSError, CalledProcessError, ValueError) as error:
                yield from make_error_log_records(
                    '{}: Error running actions for repository'.format(repository_path), error
                )
                if retry_num < retries:
                    repo_queue.put((repository_path, retry_num + 1),)
                    logger.warning(
                        f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
                    )
                    continue
                # Out of retries: remember the error (and its repository) for the
                # on_error hook below, but keep going so post hooks still run.
                encountered_error = error
                error_repository = repository_path

    # Run post-action hooks and notify monitors of a successful finish, but only if
    # everything so far succeeded.
    if not encountered_error:
        try:
            if 'prune' in arguments:
                command.execute_hook(
                    hooks.get('after_prune'),
                    hooks.get('umask'),
                    config_filename,
                    'post-prune',
                    global_arguments.dry_run,
                )
            if 'compact' in arguments:
                command.execute_hook(
                    hooks.get('after_compact'),
                    hooks.get('umask'),
                    config_filename,
                    'post-compact',
                    global_arguments.dry_run,
                )
            if 'create' in arguments:
                # Clean up any database dumps created for the backup before running the
                # after_backup hook.
                dispatch.call_hooks(
                    'remove_database_dumps',
                    hooks,
                    config_filename,
                    dump.DATABASE_HOOK_NAMES,
                    location,
                    global_arguments.dry_run,
                )
                command.execute_hook(
                    hooks.get('after_backup'),
                    hooks.get('umask'),
                    config_filename,
                    'post-backup',
                    global_arguments.dry_run,
                )
            if 'check' in arguments:
                command.execute_hook(
                    hooks.get('after_check'),
                    hooks.get('umask'),
                    config_filename,
                    'post-check',
                    global_arguments.dry_run,
                )
            if 'extract' in arguments:
                command.execute_hook(
                    hooks.get('after_extract'),
                    hooks.get('umask'),
                    config_filename,
                    'post-extract',
                    global_arguments.dry_run,
                )
            if using_primary_action:
                dispatch.call_hooks(
                    'ping_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitor.State.FINISH,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
                dispatch.call_hooks(
                    'destroy_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            encountered_error = error
            yield from make_error_log_records(
                '{}: Error running post hook'.format(config_filename), error
            )

    # On failure of any primary action, run the on_error hook and report the failure to
    # any configured monitors.
    if encountered_error and using_primary_action:
        try:
            command.execute_hook(
                hooks.get('on_error'),
                hooks.get('umask'),
                config_filename,
                'on-error',
                global_arguments.dry_run,
                repository=error_repository,
                error=encountered_error,
                output=getattr(encountered_error, 'output', ''),
            )
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FAIL,
                monitoring_log_level,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'destroy_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            yield from make_error_log_records(
                '{}: Error running on-error hook'.format(config_filename), error
            )
def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    local_borg_version,
    repository_path,
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, a local Borg version string, and a
    repository name, run all actions from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'compact' in arguments:
        # Compacting is only supported by sufficiently recent Borg versions.
        if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
            logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
            borg_compact.compact_segments(
                global_arguments.dry_run,
                repository,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                progress=arguments['compact'].progress,
                cleanup_commits=arguments['compact'].cleanup_commits,
                threshold=arguments['compact'].threshold,
            )
        else:
            logger.info(
                '{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
            )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        # Remove any stale database dumps, then dump afresh for inclusion in the archive.
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        # Flatten the per-hook dump processes so create_archive() can stream from all of them.
        stream_processes = [process for processes in active_dumps.values() for process in processes]

        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)

    # Checks can be disabled per repository via configuration.
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        # Only extract from this repository if it matches the requested one (or none given).
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'export-tar' in arguments:
        if arguments['export-tar'].repository is None or validate.repositories_match(
            repository, arguments['export-tar'].repository
        ):
            logger.info(
                '{}: Exporting archive {} as tar file'.format(
                    repository, arguments['export-tar'].archive
                )
            )
            borg_export_tar.export_tar_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['export-tar'].archive, storage, local_path, remote_path
                ),
                arguments['export-tar'].paths,
                arguments['export-tar'].destination,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                tar_filter=arguments['export-tar'].tar_filter,
                files=arguments['export-tar'].files,
                strip_components=arguments['export-tar'].strip_components,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))

            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )
            # Clear out any leftover dumps before extracting fresh ones from the archive.
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            # "all" means: restore every configured database, i.e. no name filter.
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path, remote_path
            )
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(restore_database.get('format') != 'directory'),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            # Any requested names that weren't configured are an error.
            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
                        ', '.join(missing_names)
                    )
                )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            # Copy the arguments namespace so the archive resolution below doesn't leak into
            # other repositories' runs.
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning('{}: Displaying summary info for archives'.format(repository))
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'borg' in arguments:
        if arguments['borg'].repository is None or validate.repositories_match(
            repository, arguments['borg'].repository
        ):
            logger.warning('{}: Running arbitrary Borg command'.format(repository))
            archive_name = borg_list.resolve_archive_name(
                repository, arguments['borg'].archive, storage, local_path, remote_path
            )
            borg_borg.run_arbitrary_borg(
                repository,
                storage,
                options=arguments['borg'].options,
                archive=archive_name,
                local_path=local_path,
                remote_path=remote_path,
            )
  592. def load_configurations(config_filenames, overrides=None):
  593. '''
  594. Given a sequence of configuration filenames, load and validate each configuration file. Return
  595. the results as a tuple of: dict of configuration filename to corresponding parsed configuration,
  596. and sequence of logging.LogRecord instances containing any parse errors.
  597. '''
  598. # Dict mapping from config filename to corresponding parsed config dict.
  599. configs = collections.OrderedDict()
  600. logs = []
  601. # Parse and load each configuration file.
  602. for config_filename in config_filenames:
  603. try:
  604. configs[config_filename] = validate.parse_configuration(
  605. config_filename, validate.schema_filename(), overrides
  606. )
  607. except (ValueError, OSError, validate.Validation_error) as error:
  608. logs.extend(
  609. [
  610. logging.makeLogRecord(
  611. dict(
  612. levelno=logging.CRITICAL,
  613. levelname='CRITICAL',
  614. msg='{}: Error parsing configuration file'.format(config_filename),
  615. )
  616. ),
  617. logging.makeLogRecord(
  618. dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
  619. ),
  620. ]
  621. )
  622. return (configs, logs)
  623. def log_record(suppress_log=False, **kwargs):
  624. '''
  625. Create a log record based on the given makeLogRecord() arguments, one of which must be
  626. named "levelno". Log the record (unless suppress log is set) and return it.
  627. '''
  628. record = logging.makeLogRecord(kwargs)
  629. if suppress_log:
  630. return record
  631. logger.handle(record)
  632. return record
  633. def make_error_log_records(message, error=None):
  634. '''
  635. Given error message text and an optional exception object, yield a series of logging.LogRecord
  636. instances with error summary information. As a side effect, log each record.
  637. '''
  638. if not error:
  639. yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
  640. return
  641. try:
  642. raise error
  643. except CalledProcessError as error:
  644. yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
  645. if error.output:
  646. # Suppress these logs for now and save full error output for the log summary at the end.
  647. yield log_record(
  648. levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output, suppress_log=True
  649. )
  650. yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
  651. except (ValueError, OSError) as error:
  652. yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
  653. yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
  654. except: # noqa: E722
  655. # Raising above only as a means of determining the error type. Swallow the exception here
  656. # because we don't want the exception to propagate out of this function.
  657. pass
  658. def get_local_path(configs):
  659. '''
  660. Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
  661. set.
  662. '''
  663. return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')
def collect_configuration_run_summary_logs(configs, arguments):
    '''
    Given a dict of configuration filename to corresponding parsed configuration, and parsed
    command-line arguments as a dict from subparser name to a parsed namespace of arguments, run
    each configuration file and yield a series of logging.LogRecord instances containing summary
    information about each run.

    As a side effect of running through these configuration files, output their JSON results, if
    any, to stdout.
    '''
    # Run cross-file validation checks: if the user gave an explicit repository for one of
    # these actions, make sure some configuration file actually contains it.
    if 'extract' in arguments:
        repository = arguments['extract'].repository
    elif 'list' in arguments and arguments['list'].archive:
        repository = arguments['list'].repository
    elif 'mount' in arguments:
        repository = arguments['mount'].repository
    else:
        repository = None

    if repository:
        try:
            validate.guard_configuration_contains_repository(repository, configs)
        except ValueError as error:
            yield from make_error_log_records(str(error))
            return

    if not configs:
        yield from make_error_log_records(
            '{}: No valid configuration files found'.format(
                ' '.join(arguments['global'].config_paths)
            )
        )
        return

    # Run the before_everything hooks from all configuration files up front, aborting the
    # whole run if any of them fails.
    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('before_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'pre-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from make_error_log_records('Error running pre-everything hook', error)
            return

    # Execute the actions corresponding to each configuration file.
    json_results = []
    for config_filename, config in configs.items():
        results = list(run_configuration(config_filename, config, arguments))
        # run_configuration() yields LogRecords for errors and plain values for JSON output.
        error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))

        if error_logs:
            yield from make_error_log_records(
                '{}: Error running configuration file'.format(config_filename)
            )
            yield from error_logs
        else:
            yield logging.makeLogRecord(
                dict(
                    levelno=logging.INFO,
                    levelname='INFO',
                    msg='{}: Successfully ran configuration file'.format(config_filename),
                )
            )
            if results:
                json_results.extend(results)

    if 'umount' in arguments:
        logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
        try:
            borg_umount.unmount_archive(
                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs)
            )
        except (CalledProcessError, OSError) as error:
            yield from make_error_log_records('Error unmounting mount point', error)

    # Emit all collected JSON results to stdout as a single JSON document.
    if json_results:
        sys.stdout.write(json.dumps(json_results))

    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('after_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'post-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from make_error_log_records('Error running post-everything hook', error)
  752. def exit_with_help_link(): # pragma: no cover
  753. '''
  754. Display a link to get help and exit with an error code.
  755. '''
  756. logger.critical('')
  757. logger.critical('Need some help? https://torsion.org/borgmatic/#issues')
  758. sys.exit(1)
def main():  # pragma: no cover
    '''
    Parse command-line arguments, load configuration files, run the requested actions across
    all of them, and emit a summary of the results — exiting non-zero via exit_with_help_link()
    on any critical error.
    '''
    configure_signals()

    try:
        arguments = parse_arguments(*sys.argv[1:])
    except ValueError as error:
        configure_logging(logging.CRITICAL)
        logger.critical(error)
        exit_with_help_link()
    except SystemExit as error:
        # argparse exits via SystemExit; code 0 (e.g. --help) is not an error, so re-raise it.
        if error.code == 0:
            raise error
        configure_logging(logging.CRITICAL)
        logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
        exit_with_help_link()

    global_arguments = arguments['global']
    if global_arguments.version:
        print(pkg_resources.require('borgmatic')[0].version)
        sys.exit(0)

    config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
    configs, parse_logs = load_configurations(config_filenames, global_arguments.overrides)

    # Disable colored output when any action was asked for JSON, so that stdout stays
    # machine-parseable.
    any_json_flags = any(
        getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values()
    )
    colorama.init(
        autoreset=True,
        strip=not should_do_markup(global_arguments.no_color or any_json_flags, configs),
    )
    try:
        configure_logging(
            verbosity_to_log_level(global_arguments.verbosity),
            verbosity_to_log_level(global_arguments.syslog_verbosity),
            verbosity_to_log_level(global_arguments.log_file_verbosity),
            verbosity_to_log_level(global_arguments.monitoring_verbosity),
            global_arguments.log_file,
        )
    except (FileNotFoundError, PermissionError) as error:
        configure_logging(logging.CRITICAL)
        logger.critical('Error configuring logging: {}'.format(error))
        exit_with_help_link()

    logger.debug('Ensuring legacy configuration is upgraded')
    convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)

    # Combine configuration parse errors with per-run summary records, then replay them all
    # at the end under a "summary:" header at the highest encountered severity.
    summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments))
    summary_logs_max_level = max(log.levelno for log in summary_logs)

    for message in ('', 'summary:'):
        log_record(
            levelno=summary_logs_max_level,
            levelname=logging.getLevelName(summary_logs_max_level),
            msg=message,
        )

    for log in summary_logs:
        logger.handle(log)

    if summary_logs_max_level >= logging.CRITICAL:
        exit_with_help_link()