postgresql.py

import csv
import itertools
import logging
import os
import pathlib
import shlex

import borgmatic.borg.pattern
import borgmatic.config.paths
from borgmatic.execute import (
    execute_command,
    execute_command_and_capture_output,
    execute_command_with_processes,
)
from borgmatic.hooks.data_source import dump

logger = logging.getLogger(__name__)


def make_dump_path(base_directory):  # pragma: no cover
    '''
    Given a base directory, make the corresponding dump path.
    '''
    return dump.make_data_source_dump_path(base_directory, 'postgresql_databases')


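# PGPASSWORD and the PGSSL* variables set by make_extra_environment are standard libpq
# environment variables, so they are honored by psql, pg_dump, pg_dumpall, and pg_restore alike.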
def make_extra_environment(database, restore_connection_params=None):
    '''
    Make the extra_environment dict from the given database configuration. If restore connection
    params are given, this is for a restore operation.
    '''
    extra = dict()

    try:
        if restore_connection_params:
            extra['PGPASSWORD'] = restore_connection_params.get('password') or database.get(
                'restore_password', database['password']
            )
        else:
            extra['PGPASSWORD'] = database['password']
    except (AttributeError, KeyError):
        pass

    if 'ssl_mode' in database:
        extra['PGSSLMODE'] = database['ssl_mode']
    if 'ssl_cert' in database:
        extra['PGSSLCERT'] = database['ssl_cert']
    if 'ssl_key' in database:
        extra['PGSSLKEY'] = database['ssl_key']
    if 'ssl_root_cert' in database:
        extra['PGSSLROOTCERT'] = database['ssl_root_cert']
    if 'ssl_crl' in database:
        extra['PGSSLCRL'] = database['ssl_crl']

    return extra


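# template0 and template1 are PostgreSQL's built-in template databases; they are skipped
# when expanding an "all" request into individual database dumps.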
EXCLUDED_DATABASE_NAMES = ('template0', 'template1')


def database_names_to_dump(database, extra_environment, dry_run):
    '''
    Given a requested database config, return the corresponding sequence of database names to dump.
    In the case of "all" when a database format is given, query for the names of databases on the
    configured host and return them. For "all" without a database format, just return a sequence
    containing "all".
    '''
    requested_name = database['name']

    if requested_name != 'all':
        return (requested_name,)

    if not database.get('format'):
        return ('all',)

    if dry_run:
        return ()

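    # "psql --list --csv --tuples-only" prints one CSV row per database with the database name
    # in the first column; those rows are parsed below to get the names to dump.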
    psql_command = tuple(
        shlex.quote(part) for part in shlex.split(database.get('psql_command') or 'psql')
    )
    list_command = (
        psql_command
        + ('--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only')
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
        + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
    )

    logger.debug('Querying for "all" PostgreSQL databases to dump')
    list_output = execute_command_and_capture_output(
        list_command, extra_environment=extra_environment
    )

    return tuple(
        row[0]
        for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
        if row[0] not in EXCLUDED_DATABASE_NAMES
    )


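# 5432 is PostgreSQL's standard server port.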
def get_default_port(databases, config):  # pragma: no cover
    return 5432


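# Directory-format dumps are written to real files on disk (see dump_data_sources below), while
# every other format is streamed through a named pipe, so streaming is needed whenever any
# configured database uses a non-directory format.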
def use_streaming(databases, config):
    '''
    Given a sequence of PostgreSQL database configuration dicts and a configuration dict (ignored),
    return whether streaming will be used during dumps.
    '''
    return any(database.get('format') != 'directory' for database in databases)


def dump_data_sources(
    databases,
    config,
    config_paths,
    borgmatic_runtime_directory,
    patterns,
    dry_run,
):
    '''
    Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence
    of dicts, one dict describing each database as per the configuration schema. Use the given
    borgmatic runtime directory to construct the destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a
    named pipe. But if this is a dry run, then don't actually dump anything and return an empty
    sequence.

    Also append the parent directory of the database dumps to the given patterns list, so the
    dumps actually get backed up.

    Raise ValueError if the databases to dump cannot be determined.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info(f'Dumping PostgreSQL databases{dry_run_label}')

    for database in databases:
        extra_environment = make_extra_environment(database)
        dump_path = make_dump_path(borgmatic_runtime_directory)
        dump_database_names = database_names_to_dump(database, extra_environment, dry_run)

        if not dump_database_names:
            if dry_run:
                continue

            raise ValueError('Cannot find any PostgreSQL databases to dump.')

        for database_name in dump_database_names:
            dump_format = database.get('format', None if database_name == 'all' else 'custom')
            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
            dump_command = tuple(
                shlex.quote(part)
                for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
            )
            dump_filename = dump.make_data_source_dump_filename(
                dump_path,
                database_name,
                database.get('hostname'),
                database.get('port'),
            )

            if os.path.exists(dump_filename):
                logger.warning(
                    f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                )
                continue

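            # The dump command is assembled as a flat tuple of shell-quoted words and run with
            # shell=True below. As a purely hypothetical example, a database configured as
            # {'name': 'users', 'format': 'custom'} produces something along the lines of:
            #   pg_dump --no-password --clean --if-exists --format custom users > <dump_filename>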
            command = (
                dump_command
                + (
                    '--no-password',
                    '--clean',
                    '--if-exists',
                )
                + (('--host', shlex.quote(database['hostname'])) if 'hostname' in database else ())
                + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
                + (
                    ('--username', shlex.quote(database['username']))
                    if 'username' in database
                    else ()
                )
                + (('--no-owner',) if database.get('no_owner', False) else ())
                + (('--format', shlex.quote(dump_format)) if dump_format else ())
                + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
                + (
                    tuple(shlex.quote(option) for option in database['options'].split(' '))
                    if 'options' in database
                    else ()
                )
                + (() if database_name == 'all' else (shlex.quote(database_name),))
                # Use shell redirection rather than the --file flag to sidestep synchronization
                # issues when pg_dump/pg_dumpall tries to write to a named pipe. But for the
                # directory dump format in particular, a named destination is required, and
                # redirection doesn't work.
                + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
            )

            logger.debug(
                f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
            )

            if dry_run:
                continue

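            # Directory-format dumps must be written straight to disk, so pg_dump runs to
            # completion here. Every other format is written to a named pipe, and the dump
            # process is returned so it can stream while Borg consumes the pipe.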
            if dump_format == 'directory':
                dump.create_parent_directory_for_dump(dump_filename)
                execute_command(
                    command,
                    shell=True,
                    extra_environment=extra_environment,
                )
            else:
                dump.create_named_pipe_for_dump(dump_filename)
                processes.append(
                    execute_command(
                        command,
                        shell=True,
                        extra_environment=extra_environment,
                        run_to_completion=False,
                    )
                )

    if not dry_run:
        patterns.append(
            borgmatic.borg.pattern.Pattern(
                os.path.join(borgmatic_runtime_directory, 'postgresql_databases')
            )
        )

    return processes


def remove_data_source_dumps(
    databases, config, borgmatic_runtime_directory, dry_run
):  # pragma: no cover
    '''
    Remove all database dump files for this hook regardless of the given databases. Use the
    borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
    actually remove anything.
    '''
    dump.remove_data_source_dumps(
        make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', dry_run
    )


def make_data_source_dump_patterns(
    databases, config, borgmatic_runtime_directory, name=None
):  # pragma: no cover
    '''
    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
    and a database name to match, return the corresponding glob patterns to match the database dump
    in an archive.
    '''
    borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

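    # Globs are returned for each location a dump may occupy within an archive: the literal
    # 'borgmatic' path, the current runtime directory, and the borgmatic source directory
    # (the latter presumably matching archives made by older borgmatic versions).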
    return (
        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
        dump.make_data_source_dump_filename(
            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
        ),
        dump.make_data_source_dump_filename(
            make_dump_path(borgmatic_source_directory), name, hostname='*'
        ),
    )


def restore_data_source_dump(
    hook_config,
    config,
    data_source,
    dry_run,
    extract_process,
    connection_params,
    borgmatic_runtime_directory,
):
    '''
    Restore a database from the given extract stream. The database is supplied as a data source
    configuration dict, but the given hook configuration is ignored. The given borgmatic runtime
    directory is used to construct the destination path (used for the directory format). If this is
    a dry run, then don't actually restore anything. Trigger the given active extract process (an
    instance of subprocess.Popen) to produce output to consume.

    If the extract process is None, then restore the dump from the filesystem rather than from an
    extract stream.

    Use the given connection parameters to connect to the database. The connection parameters are
    hostname, port, username, and password.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
    hostname = connection_params['hostname'] or data_source.get(
        'restore_hostname', data_source.get('hostname')
    )
    port = str(
        connection_params['port'] or data_source.get('restore_port', data_source.get('port', ''))
    )
    username = connection_params['username'] or data_source.get(
        'restore_username', data_source.get('username')
    )
    all_databases = bool(data_source['name'] == 'all')
    dump_filename = dump.make_data_source_dump_filename(
        make_dump_path(borgmatic_runtime_directory),
        data_source['name'],
        data_source.get('hostname'),
    )

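    # An ANALYZE is issued after the restore so PostgreSQL refreshes its planner statistics for
    # the newly restored data.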
    psql_command = tuple(
        shlex.quote(part) for part in shlex.split(data_source.get('psql_command') or 'psql')
    )
    analyze_command = (
        psql_command
        + ('--no-password', '--no-psqlrc', '--quiet')
        + (('--host', hostname) if hostname else ())
        + (('--port', port) if port else ())
        + (('--username', username) if username else ())
        + (('--dbname', data_source['name']) if not all_databases else ())
        + (
            tuple(data_source['analyze_options'].split(' '))
            if 'analyze_options' in data_source
            else ()
        )
        + ('--command', 'ANALYZE')
    )

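    # Plain-format dumps and "all" dumps produced by pg_dumpall are SQL scripts, so they are
    # replayed with psql; the custom, directory, and tar archive formats go through pg_restore.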
    use_psql_command = all_databases or data_source.get('format') == 'plain'
    pg_restore_command = tuple(
        shlex.quote(part)
        for part in shlex.split(data_source.get('pg_restore_command') or 'pg_restore')
    )
    restore_command = (
        (psql_command if use_psql_command else pg_restore_command)
        + ('--no-password',)
        + (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean'))
        + (('--dbname', data_source['name']) if not all_databases else ())
        + (('--host', hostname) if hostname else ())
        + (('--port', port) if port else ())
        + (('--username', username) if username else ())
        + (('--no-owner',) if data_source.get('no_owner', False) else ())
        + (
            tuple(data_source['restore_options'].split(' '))
            if 'restore_options' in data_source
            else ()
        )
        + (() if extract_process else (str(pathlib.Path(dump_filename)),))
        + tuple(
            itertools.chain.from_iterable(('--schema', schema) for schema in data_source['schemas'])
            if data_source.get('schemas')
            else ()
        )
    )

    extra_environment = make_extra_environment(
        data_source, restore_connection_params=connection_params
    )

    logger.debug(f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}")
    if dry_run:
        return

    # Don't give Borg a local path, so as to error on warnings, as "borg extract" only gives a
    # warning if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process] if extract_process else [],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout if extract_process else None,
        extra_environment=extra_environment,
    )

    execute_command(analyze_command, extra_environment=extra_environment)