import csv
import itertools
import logging
import os
import pathlib
import shlex

import borgmatic.borg.pattern
import borgmatic.config.paths
import borgmatic.hooks.credential.tag
from borgmatic.execute import (
    execute_command,
    execute_command_and_capture_output,
    execute_command_with_processes,
)
from borgmatic.hooks.data_source import dump

logger = logging.getLogger(__name__)

def make_dump_path(base_directory):  # pragma: no cover
    '''
    Given a base directory, make the corresponding dump path.
    '''
    return dump.make_data_source_dump_path(base_directory, 'postgresql_databases')

def make_extra_environment(database, restore_connection_params=None):
    '''
    Make the extra_environment dict from the given database configuration. If restore connection
    params are given, this is for a restore operation.
    '''
    extra = dict()

    try:
        if restore_connection_params:
            extra['PGPASSWORD'] = borgmatic.hooks.credential.tag.resolve_credential(
                restore_connection_params.get('password')
                or database.get('restore_password', database['password'])
            )
        else:
            extra['PGPASSWORD'] = borgmatic.hooks.credential.tag.resolve_credential(
                database['password']
            )
    except (AttributeError, KeyError):
        pass

    if 'ssl_mode' in database:
        extra['PGSSLMODE'] = database['ssl_mode']
    if 'ssl_cert' in database:
        extra['PGSSLCERT'] = database['ssl_cert']
    if 'ssl_key' in database:
        extra['PGSSLKEY'] = database['ssl_key']
    if 'ssl_root_cert' in database:
        extra['PGSSLROOTCERT'] = database['ssl_root_cert']
    if 'ssl_crl' in database:
        extra['PGSSLCRL'] = database['ssl_crl']

    return extra

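# Illustrative sketch (not part of the original module): assuming a hypothetical database config
# whose password is a plain string rather than a credential tag (so resolve_credential() passes it
# through unchanged), make_extra_environment() produces roughly:
#
#     make_extra_environment({'name': 'users', 'password': 'trustsome1', 'ssl_mode': 'require'})
#     # => {'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'require'}
#
# A config without a 'password' key simply omits PGPASSWORD instead of raising, per the
# AttributeError/KeyError handling above.
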
EXCLUDED_DATABASE_NAMES = ('template0', 'template1')

def database_names_to_dump(database, extra_environment, dry_run):
    '''
    Given a requested database config, return the corresponding sequence of database names to dump.
    In the case of "all" when a database format is given, query for the names of databases on the
    configured host and return them. For "all" without a database format, just return a sequence
    containing "all".
    '''
    requested_name = database['name']

    if requested_name != 'all':
        return (requested_name,)
    if not database.get('format'):
        return ('all',)
    if dry_run:
        return ()

    psql_command = tuple(
        shlex.quote(part) for part in shlex.split(database.get('psql_command') or 'psql')
    )
    list_command = (
        psql_command
        + ('--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only')
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (
            ('--username', borgmatic.hooks.credential.tag.resolve_credential(database['username']))
            if 'username' in database
            else ()
        )
        + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
    )
    logger.debug('Querying for "all" PostgreSQL databases to dump')
    list_output = execute_command_and_capture_output(
        list_command, extra_environment=extra_environment
    )

    return tuple(
        row[0]
        for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
        if row[0] not in EXCLUDED_DATABASE_NAMES
    )

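# Illustrative sketch (not part of the original module): for a hypothetical config such as
# {'name': 'all', 'format': 'custom', 'hostname': 'db.example.org'}, the list_command built above
# amounts to something like:
#
#     psql --list --no-password --no-psqlrc --csv --tuples-only --host db.example.org
#
# whose CSV output is then filtered so that template0 and template1 are never dumped.
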
def get_default_port(databases, config):  # pragma: no cover
    '''
    Return the default PostgreSQL port.
    '''
    return 5432

def use_streaming(databases, config):
    '''
    Given a sequence of PostgreSQL database configuration dicts and a configuration dict (ignored),
    return whether streaming will be used during dumps.
    '''
    return any(database.get('format') != 'directory' for database in databases)

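# For example (illustrative only): use_streaming([{'name': 'users', 'format': 'directory'}], {})
# returns False, since directory-format dumps are written to disk rather than streamed; adding a
# second database without an explicit format flips it to True, because such dumps go to a named
# pipe.
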
def dump_data_sources(
    databases,
    config,
    config_paths,
    borgmatic_runtime_directory,
    patterns,
    dry_run,
):
    '''
    Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence
    of dicts, one dict describing each database as per the configuration schema. Use the given
    borgmatic runtime directory to construct the destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a
    named pipe. But if this is a dry run, then don't actually dump anything and return an empty
    sequence. Also append the parent directory of the database dumps to the given patterns list,
    so the dumps actually get backed up.

    Raise ValueError if the databases to dump cannot be determined.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info(f'Dumping PostgreSQL databases{dry_run_label}')

    for database in databases:
        extra_environment = make_extra_environment(database)
        dump_path = make_dump_path(borgmatic_runtime_directory)
        dump_database_names = database_names_to_dump(database, extra_environment, dry_run)

        if not dump_database_names:
            if dry_run:
                continue

            raise ValueError('Cannot find any PostgreSQL databases to dump.')

        for database_name in dump_database_names:
            dump_format = database.get('format', None if database_name == 'all' else 'custom')
            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
            dump_command = tuple(
                shlex.quote(part)
                for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
            )
            dump_filename = dump.make_data_source_dump_filename(
                dump_path,
                database_name,
                database.get('hostname'),
                database.get('port'),
            )

            if os.path.exists(dump_filename):
                logger.warning(
                    f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                )
                continue

            command = (
                dump_command
                + (
                    '--no-password',
                    '--clean',
                    '--if-exists',
                )
                + (('--host', shlex.quote(database['hostname'])) if 'hostname' in database else ())
                + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
                + (
                    (
                        '--username',
                        shlex.quote(
                            borgmatic.hooks.credential.tag.resolve_credential(database['username'])
                        ),
                    )
                    if 'username' in database
                    else ()
                )
                + (('--no-owner',) if database.get('no_owner', False) else ())
                + (('--format', shlex.quote(dump_format)) if dump_format else ())
                + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
                + (
                    tuple(shlex.quote(option) for option in database['options'].split(' '))
                    if 'options' in database
                    else ()
                )
                + (() if database_name == 'all' else (shlex.quote(database_name),))
                # Use shell redirection rather than the --file flag to sidestep synchronization
                # issues when pg_dump/pg_dumpall tries to write to a named pipe. But for the
                # directory dump format in particular, a named destination is required, and
                # redirection doesn't work.
                + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
            )

            logger.debug(
                f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
            )
            if dry_run:
                continue

            if dump_format == 'directory':
                dump.create_parent_directory_for_dump(dump_filename)
                execute_command(
                    command,
                    shell=True,
                    extra_environment=extra_environment,
                )
            else:
                dump.create_named_pipe_for_dump(dump_filename)
                processes.append(
                    execute_command(
                        command,
                        shell=True,
                        extra_environment=extra_environment,
                        run_to_completion=False,
                    )
                )

    if not dry_run:
        patterns.append(
            borgmatic.borg.pattern.Pattern(
                os.path.join(borgmatic_runtime_directory, 'postgresql_databases')
            )
        )

    return processes

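# Illustrative sketch (not part of the original module): for a hypothetical config like
# {'name': 'users', 'hostname': 'db.example.org'}, the command constructed above runs via the
# shell as roughly:
#
#     pg_dump --no-password --clean --if-exists --host db.example.org --format custom users \
#         > <borgmatic_runtime_directory>/postgresql_databases/db.example.org/users
#
# with the redirection target created beforehand as a named pipe by
# dump.create_named_pipe_for_dump(). The destination path shown is approximate; the exact layout
# comes from dump.make_data_source_dump_filename().
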
def remove_data_source_dumps(
    databases, config, borgmatic_runtime_directory, dry_run
):  # pragma: no cover
    '''
    Remove all database dump files for this hook regardless of the given databases. Use the
    borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
    actually remove anything.
    '''
    dump.remove_data_source_dumps(
        make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', dry_run
    )

def make_data_source_dump_patterns(
    databases, config, borgmatic_runtime_directory, name=None
):  # pragma: no cover
    '''
    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
    and a database name to match, return the corresponding glob patterns to match the database dump
    in an archive.
    '''
    borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

    return (
        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
        dump.make_data_source_dump_filename(
            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
        ),
        dump.make_data_source_dump_filename(
            make_dump_path(borgmatic_source_directory), name, hostname='*'
        ),
    )

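# Illustrative sketch (not part of the original module): each pattern above comes from
# dump.make_data_source_dump_filename() with hostname='*', so for name='users' the runtime
# directory pattern matches something along the lines of
# <borgmatic_runtime_directory>/postgresql_databases/*/users (the exact layout is determined by
# the dump module). Three base directories are tried so that a dump recorded under any of them in
# an archive can be located.
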
def restore_data_source_dump(
    hook_config,
    config,
    data_source,
    dry_run,
    extract_process,
    connection_params,
    borgmatic_runtime_directory,
):
    '''
    Restore a database from the given extract stream. The database is supplied as a data source
    configuration dict, but the given hook configuration is ignored. The given borgmatic runtime
    directory is used to construct the destination path (used for the directory format). If this is
    a dry run, then don't actually restore anything. Trigger the given active extract process (an
    instance of subprocess.Popen) to produce output to consume.

    If the extract process is None, then restore the dump from the filesystem rather than from an
    extract stream.

    Use the given connection parameters to connect to the database. The connection parameters are
    hostname, port, username, and password.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
    hostname = connection_params['hostname'] or data_source.get(
        'restore_hostname', data_source.get('hostname')
    )
    port = str(
        connection_params['port'] or data_source.get('restore_port', data_source.get('port', ''))
    )
    username = borgmatic.hooks.credential.tag.resolve_credential(
        connection_params['username']
        or data_source.get('restore_username', data_source.get('username'))
    )

    all_databases = bool(data_source['name'] == 'all')
    dump_filename = dump.make_data_source_dump_filename(
        make_dump_path(borgmatic_runtime_directory),
        data_source['name'],
        data_source.get('hostname'),
    )
    psql_command = tuple(
        shlex.quote(part) for part in shlex.split(data_source.get('psql_command') or 'psql')
    )
    analyze_command = (
        psql_command
        + ('--no-password', '--no-psqlrc', '--quiet')
        + (('--host', hostname) if hostname else ())
        + (('--port', port) if port else ())
        + (('--username', username) if username else ())
        + (('--dbname', data_source['name']) if not all_databases else ())
        + (
            tuple(data_source['analyze_options'].split(' '))
            if 'analyze_options' in data_source
            else ()
        )
        + ('--command', 'ANALYZE')
    )
    use_psql_command = all_databases or data_source.get('format') == 'plain'
    pg_restore_command = tuple(
        shlex.quote(part)
        for part in shlex.split(data_source.get('pg_restore_command') or 'pg_restore')
    )
    restore_command = (
        (psql_command if use_psql_command else pg_restore_command)
        + ('--no-password',)
        + (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean'))
        + (('--dbname', data_source['name']) if not all_databases else ())
        + (('--host', hostname) if hostname else ())
        + (('--port', port) if port else ())
        + (('--username', username) if username else ())
        + (('--no-owner',) if data_source.get('no_owner', False) else ())
        + (
            tuple(data_source['restore_options'].split(' '))
            if 'restore_options' in data_source
            else ()
        )
        + (() if extract_process else (str(pathlib.Path(dump_filename)),))
        + tuple(
            itertools.chain.from_iterable(('--schema', schema) for schema in data_source['schemas'])
            if data_source.get('schemas')
            else ()
        )
    )

    extra_environment = make_extra_environment(
        data_source, restore_connection_params=connection_params
    )

    logger.debug(f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}")
    if dry_run:
        return

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process] if extract_process else [],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout if extract_process else None,
        extra_environment=extra_environment,
    )

    execute_command(analyze_command, extra_environment=extra_environment)
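
# Illustrative sketch (not part of the original module): restoring a custom-format dump of a
# database named 'users' from an extract stream runs roughly:
#
#     pg_restore --no-password --if-exists --exit-on-error --clean --dbname users
#
# fed from extract_process.stdout, followed by psql running ANALYZE to refresh planner statistics.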