create.py

import glob
import itertools
import logging
import os
import tempfile

from borgmatic.execute import execute_command, execute_command_without_capture

logger = logging.getLogger(__name__)


def _expand_directory(directory):
    '''
    Given a directory path, expand any tilde (representing a user's home directory) and any globs
    therein. Return a list of one or more resulting paths.
    '''
    expanded_directory = os.path.expanduser(directory)

    return glob.glob(expanded_directory) or [expanded_directory]
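
# Illustrative usage (hypothetical paths, assuming a user named "jane"):
#
#     _expand_directory('~/photos')        # -> ['/home/jane/photos']
#     _expand_directory('/var/log/*.log')  # -> ['/var/log/syslog.log', '/var/log/kern.log']
#
# If a glob matches nothing, the tilde-expanded path is returned as-is in a single-element list.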


def _expand_directories(directories):
    '''
    Given a sequence of directory paths, expand tildes and globs in each one. Return all the
    resulting directories as a single flattened tuple.
    '''
    if directories is None:
        return ()

    return tuple(
        itertools.chain.from_iterable(_expand_directory(directory) for directory in directories)
    )
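
# Illustrative usage (hypothetical paths): glob matches from each entry are flattened into a
# single tuple.
#
#     _expand_directories(['/etc', '/home/*'])  # -> ('/etc', '/home/jane', '/home/sam')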


def _expand_home_directories(directories):
    '''
    Given a sequence of directory paths, expand tildes in each one. Do not perform any globbing.
    Return the results as a tuple.
    '''
    if directories is None:
        return ()

    return tuple(os.path.expanduser(directory) for directory in directories)
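
# Illustrative usage (hypothetical paths): only tildes are expanded, so any glob characters are
# passed through untouched.
#
#     _expand_home_directories(['~/.cache/*', '/var/tmp'])
#     # -> ('/home/jane/.cache/*', '/var/tmp')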


def _write_pattern_file(patterns=None):
    '''
    Given a sequence of patterns, write them to a named temporary file and return it. Return None
    if no patterns are provided.
    '''
    if not patterns:
        return None

    pattern_file = tempfile.NamedTemporaryFile('w')
    pattern_file.write('\n'.join(patterns))
    pattern_file.flush()

    return pattern_file
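
# Illustrative usage (hypothetical patterns): the caller must keep a reference to the returned
# file object, because NamedTemporaryFile removes the underlying file once it's closed or
# garbage collected.
#
#     pattern_file = _write_pattern_file(['R /home', '- /home/*/.cache'])
#     if pattern_file:
#         print(pattern_file.name)  # e.g. '/tmp/tmpab12cd34'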


def _make_pattern_flags(location_config, pattern_filename=None):
    '''
    Given a location config dict with a potential patterns_from option, and a filename containing
    any additional patterns, return the corresponding Borg flags for those files as a tuple.
    '''
    pattern_filenames = tuple(location_config.get('patterns_from') or ()) + (
        (pattern_filename,) if pattern_filename else ()
    )

    return tuple(
        itertools.chain.from_iterable(
            ('--patterns-from', pattern_filename) for pattern_filename in pattern_filenames
        )
    )
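
# Illustrative usage (hypothetical filenames):
#
#     _make_pattern_flags({'patterns_from': ['/etc/borgmatic/patterns']}, '/tmp/tmp1234')
#     # -> ('--patterns-from', '/etc/borgmatic/patterns', '--patterns-from', '/tmp/tmp1234')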


def _make_exclude_flags(location_config, exclude_filename=None):
    '''
    Given a location config dict with various exclude options, and a filename containing any
    exclude patterns, return the corresponding Borg flags as a tuple.
    '''
    exclude_filenames = tuple(location_config.get('exclude_from') or ()) + (
        (exclude_filename,) if exclude_filename else ()
    )
    exclude_from_flags = tuple(
        itertools.chain.from_iterable(
            ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
        )
    )
    caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else ()
    if_present = location_config.get('exclude_if_present')
    if_present_flags = ('--exclude-if-present', if_present) if if_present else ()
    keep_exclude_tags_flags = (
        ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else ()
    )
    exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else ()

    return (
        exclude_from_flags
        + caches_flag
        + if_present_flags
        + keep_exclude_tags_flags
        + exclude_nodump_flags
    )
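
# Illustrative usage (hypothetical config):
#
#     _make_exclude_flags({'exclude_caches': True, 'exclude_if_present': '.nobackup'})
#     # -> ('--exclude-caches', '--exclude-if-present', '.nobackup')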


BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'


def borgmatic_source_directories():
    '''
    Return a list of borgmatic-specific source directories used for state like database backups.
    '''
    return (
        [BORGMATIC_SOURCE_DIRECTORY]
        if os.path.exists(os.path.expanduser(BORGMATIC_SOURCE_DIRECTORY))
        else []
    )


def create_archive(
    dry_run,
    repository,
    location_config,
    storage_config,
    local_path='borg',
    remote_path=None,
    progress=False,
    stats=False,
    json=False,
):
    '''
    Given verbosity/dry-run flags, a local or remote repository path, a location config dict, and
    a storage config dict, create a Borg archive and return Borg's JSON output (if any).
    '''
    sources = _expand_directories(
        location_config['source_directories'] + borgmatic_source_directories()
    )

    pattern_file = _write_pattern_file(location_config.get('patterns'))
    exclude_file = _write_pattern_file(
        _expand_home_directories(location_config.get('exclude_patterns'))
    )
    checkpoint_interval = storage_config.get('checkpoint_interval', None)
    chunker_params = storage_config.get('chunker_params', None)
    compression = storage_config.get('compression', None)
    remote_rate_limit = storage_config.get('remote_rate_limit', None)
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    files_cache = location_config.get('files_cache')
    default_archive_name_format = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
    archive_name_format = storage_config.get('archive_name_format', default_archive_name_format)

    full_command = (
        (local_path, 'create')
        + _make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
        + _make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
        + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
        + (('--chunker-params', chunker_params) if chunker_params else ())
        + (('--compression', compression) if compression else ())
        + (('--remote-ratelimit', str(remote_rate_limit)) if remote_rate_limit else ())
        + (('--one-file-system',) if location_config.get('one_file_system') else ())
        + (('--numeric-owner',) if location_config.get('numeric_owner') else ())
        + (('--noatime',) if location_config.get('atime') is False else ())
        + (('--noctime',) if location_config.get('ctime') is False else ())
        + (('--nobirthtime',) if location_config.get('birthtime') is False else ())
        + (('--read-special',) if location_config.get('read_special') else ())
        + (('--nobsdflags',) if location_config.get('bsd_flags') is False else ())
        + (('--files-cache', files_cache) if files_cache else ())
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--list', '--filter', 'AME-') if logger.isEnabledFor(logging.INFO) and not json else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
        + (
            ('--stats',)
            if not dry_run and (logger.isEnabledFor(logging.INFO) or stats) and not json
            else ()
        )
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ())
        + (('--dry-run',) if dry_run else ())
        + (('--progress',) if progress else ())
        + (('--json',) if json else ())
        + (
            '{repository}::{archive_name_format}'.format(
                repository=repository, archive_name_format=archive_name_format
            ),
        )
        + sources
    )

    # The progress output isn't compatible with captured and logged output, as progress messes
    # with the terminal directly.
    if progress:
        execute_command_without_capture(full_command)
        return

    if json:
        output_log_level = None
    elif stats:
        output_log_level = logging.WARNING
    else:
        output_log_level = logging.INFO

    return execute_command(full_command, output_log_level)
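
# Illustrative usage (hypothetical repository and config; a minimal sketch, since real borgmatic
# location/storage configs are loaded and validated from YAML elsewhere):
#
#     json_output = create_archive(
#         dry_run=False,
#         repository='user@backupserver:backups.borg',
#         location_config={'source_directories': ['/etc', '/home'], 'one_file_system': True},
#         storage_config={'compression': 'lz4'},
#         json=True,
#     )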