# dump.py
  1. import fnmatch
  2. import json
  3. import logging
  4. import os
  5. import shutil
  6. import borgmatic.actions.restore
# Module-level logger, namespaced to this module per standard logging practice.
logger = logging.getLogger(__name__)

# NOTE(review): presumably read by borgmatic's hook discovery machinery to signal
# that this module provides shared dump utilities rather than acting as a hook
# itself — confirm against the hook loader.
IS_A_HOOK = False
  9. def make_data_source_dump_path(borgmatic_runtime_directory, data_source_hook_name):
  10. '''
  11. Given a borgmatic runtime directory and a data source hook name, construct a data source dump
  12. path.
  13. '''
  14. return os.path.join(borgmatic_runtime_directory, data_source_hook_name)
  15. def make_data_source_dump_filename(dump_path, name, hostname=None, port=None):
  16. '''
  17. Based on the given dump directory path, data source name, hostname, and port, return a filename
  18. to use for the data source dump. The hostname defaults to localhost.
  19. Raise ValueError if the data source name is invalid.
  20. '''
  21. if os.path.sep in name:
  22. raise ValueError(f'Invalid data source name {name}')
  23. return os.path.join(
  24. dump_path,
  25. (hostname or 'localhost') + ('' if port is None else f':{port}'),
  26. name,
  27. )
  28. def write_data_source_dumps_metadata(borgmatic_runtime_directory, hook_name, dumps_metadata):
  29. '''
  30. Given the borgmatic runtime directory, a data source hook name, and a sequence of
  31. borgmatic.actions.restore.Dump instances of dump metadata, write a metadata file describing all
  32. of those dumps. This metadata is being dumped so that it's available upon restore, e.g. to
  33. support the user selecting which data source(s) should be restored.
  34. '''
  35. dumps_metadata_path = os.path.join(borgmatic_runtime_directory, hook_name, 'dumps.json')
  36. try:
  37. with open( dumps_metadata_path, 'w') as metadata_file:
  38. json.dump([dump._asdict() for dump in dumps_metadata], metadata_file, sort_keys=True)
  39. except OSError as error:
  40. raise ValueError(f'Error writing to dumps metadata at {dumps_metadata_path}: {error}')
  41. def parse_data_source_dumps_metadata(dumps_json, dumps_metadata_path):
  42. '''
  43. Given a dumps metadata JSON string as extracted from an archive, parse it into a tuple of
  44. borgmatic.actions.restore.Dump instances and return them.
  45. '''
  46. try:
  47. return tuple(borgmatic.actions.restore.Dump(**dump) for dump in json.loads(dumps_json))
  48. except (json.JSONDecodeError, TypeError) as error:
  49. raise ValueError(
  50. f'Cannot read archive data source dumps metadata at {dumps_metadata_path} due to invalid JSON: {error}',
  51. )
  52. def create_parent_directory_for_dump(dump_path):
  53. '''
  54. Create a directory to contain the given dump path.
  55. '''
  56. os.makedirs(os.path.dirname(dump_path), mode=0o700, exist_ok=True)
  57. def create_named_pipe_for_dump(dump_path):
  58. '''
  59. Create a named pipe at the given dump path.
  60. '''
  61. create_parent_directory_for_dump(dump_path)
  62. os.mkfifo(dump_path, mode=0o600)
  63. def remove_data_source_dumps(dump_path, data_source_type_name, dry_run):
  64. '''
  65. Remove all data source dumps in the given dump directory path (including the directory itself).
  66. If this is a dry run, then don't actually remove anything.
  67. '''
  68. dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
  69. logger.debug(f'Removing {data_source_type_name} data source dumps{dry_run_label}')
  70. if dry_run:
  71. return
  72. if os.path.exists(dump_path):
  73. shutil.rmtree(dump_path)
  74. def convert_glob_patterns_to_borg_pattern(patterns):
  75. '''
  76. Convert a sequence of shell glob patterns like "/etc/*", "/tmp/*" to the corresponding Borg
  77. regular expression archive pattern as a single string like "re:etc/.*|tmp/.*".
  78. '''
  79. # Remove the "\Z" generated by fnmatch.translate() because we don't want the pattern to match
  80. # only at the end of a path, as directory format dumps require extracting files with paths
  81. # longer than the pattern. E.g., a pattern of "borgmatic/*/foo_databases/test" should also match
  82. # paths like "borgmatic/*/foo_databases/test/toc.dat"
  83. return 're:' + '|'.join(
  84. fnmatch.translate(pattern.lstrip('/')).replace('\\Z', '') for pattern in patterns
  85. )