# archiver.py

from binascii import hexlify, unhexlify
from datetime import datetime
from hashlib import sha256
from operator import attrgetter
import argparse
import functools
import inspect
import io
import os
import re
import shlex
import signal
import stat
import sys
import textwrap
import traceback
import collections

from . import __version__
from .helpers import Error, location_validator, archivename_validator, format_line, format_time, format_file_size, \
    parse_pattern, PathPrefixPattern, to_localtime, timestamp, safe_timestamp, \
    get_cache_dir, prune_within, prune_split, \
    Manifest, NoManifestError, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
    dir_is_tagged, bigint_to_int, ChunkerParams, CompressionSpec, PrefixSpec, is_slow_msgpack, yes, sysinfo, \
    EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, log_multi, PatternMatcher, ErrorIgnoringTextIOWrapper
from .logger import create_logger, setup_logging
logger = create_logger()
from .compress import Compressor, COMPR_BUFFER
from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
from .repository import Repository
from .cache import Cache
from .key import key_creator, RepoKey, PassphraseKey
from .archive import backup_io, BackupOSError, Archive, ArchiveChecker, CHUNKER_PARAMS, is_special
from .remote import RepositoryServer, RemoteRepository, cache_if_remote

has_lchflags = hasattr(os, 'lchflags')

# default umask, overridden by --umask, defaults to read/write only for owner
UMASK_DEFAULT = 0o077

DASHES = '-' * 78

def argument(args, str_or_bool):
    """If bool is passed, return it. If str is passed, retrieve named attribute from args."""
    if isinstance(str_or_bool, str):
        return getattr(args, str_or_bool)
    return str_or_bool
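
# Illustrative behavior of the helper above ('dry_run' is just an example attribute):
#   argument(args, True)       -> True
#   argument(args, 'dry_run')  -> getattr(args, 'dry_run')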

def with_repository(fake=False, create=False, lock=True, exclusive=False, manifest=True, cache=False):
    """
    Method decorator for subcommand-handling methods: do_XYZ(self, args, repository, …)

    If a parameter (where allowed) is a str, the attribute of args with that name is used instead.

    :param fake: (str or bool) use None instead of repository, don't do anything else
    :param create: create repository
    :param lock: lock repository
    :param exclusive: (str or bool) lock repository exclusively (for writing)
    :param manifest: load manifest and key, pass them as keyword arguments
    :param cache: open cache, pass it as keyword argument (implies manifest)
    """
    def decorator(method):
        @functools.wraps(method)
        def wrapper(self, args, **kwargs):
            location = args.location  # note: 'location' must always be present in args
            append_only = getattr(args, 'append_only', False)
            if argument(args, fake):
                return method(self, args, repository=None, **kwargs)
            elif location.proto == 'ssh':
                repository = RemoteRepository(location, create=create, exclusive=argument(args, exclusive),
                                              lock_wait=self.lock_wait, lock=lock, append_only=append_only, args=args)
            else:
                repository = Repository(location.path, create=create, exclusive=argument(args, exclusive),
                                        lock_wait=self.lock_wait, lock=lock,
                                        append_only=append_only)
            with repository:
                if manifest or cache:
                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository)
                if cache:
                    with Cache(repository, kwargs['key'], kwargs['manifest'],
                               do_files=getattr(args, 'cache_files', False), lock_wait=self.lock_wait) as cache_:
                        return method(self, args, repository=repository, cache=cache_, **kwargs)
                else:
                    return method(self, args, repository=repository, **kwargs)
        return wrapper
    return decorator

def with_archive(method):
    @functools.wraps(method)
    def wrapper(self, args, repository, key, manifest, **kwargs):
        archive = Archive(repository, key, manifest, args.location.archive,
                          numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'))
        return method(self, args, repository=repository, manifest=manifest, key=key, archive=archive, **kwargs)
    return wrapper
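
# Usage sketch (the method name here is hypothetical): the two decorators above are
# meant to be stacked on the Archiver subcommand methods below, e.g.
#
#   @with_repository(exclusive=True, cache=True)
#   @with_archive
#   def do_example(self, args, repository, manifest, key, cache, archive):
#       ...
#
# with_repository opens (and optionally locks) the repository and loads the
# manifest/key/cache; with_archive then resolves args.location.archive into an
# Archive instance before the method body runs.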

class Archiver:

    def __init__(self, lock_wait=None):
        self.exit_code = EXIT_SUCCESS
        self.lock_wait = lock_wait

    def print_error(self, msg, *args):
        msg = args and msg % args or msg
        self.exit_code = EXIT_ERROR
        logger.error(msg)

    def print_warning(self, msg, *args):
        msg = args and msg % args or msg
        self.exit_code = EXIT_WARNING  # we do not terminate here, so it is a warning
        logger.warning(msg)

    def print_file_status(self, status, path):
        if self.output_list and (self.output_filter is None or status in self.output_filter):
            logger.info("%1s %s", status, remove_surrogates(path))

    def do_serve(self, args):
        """Start in server mode. This command is usually not used manually.
        """
        return RepositoryServer(restrict_to_paths=args.restrict_to_paths, append_only=args.append_only).serve()

    @with_repository(create=True, exclusive=True, manifest=False)
    def do_init(self, args, repository):
        """Initialize an empty repository"""
        logger.info('Initializing repository at "%s"' % args.location.canonical_path())
        key = key_creator(repository, args)
        manifest = Manifest(key, repository)
        manifest.key = key
        manifest.write()
        repository.commit()
        with Cache(repository, key, manifest, warn_if_unencrypted=False):
            pass
        return self.exit_code

    @with_repository(exclusive=True, manifest=False)
    def do_check(self, args, repository):
        """Check repository consistency"""
        if args.repair:
            msg = ("'check --repair' is an experimental feature that might result in data loss." +
                   "\n" +
                   "Type 'YES' if you understand this and want to continue: ")
            if not yes(msg, false_msg="Aborting.", truish=('YES', ),
                       env_var_override='BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
                return EXIT_ERROR
        if not args.archives_only:
            if not repository.check(repair=args.repair, save_space=args.save_space):
                return EXIT_WARNING
        if not args.repo_only and not ArchiveChecker().check(
                repository, repair=args.repair, archive=args.location.archive,
                last=args.last, prefix=args.prefix, save_space=args.save_space):
            return EXIT_WARNING
        return EXIT_SUCCESS

    @with_repository()
    def do_change_passphrase(self, args, repository, manifest, key):
        """Change repository key file passphrase"""
        key.change_passphrase()
        return EXIT_SUCCESS

    @with_repository(manifest=False)
    def do_migrate_to_repokey(self, args, repository):
        """Migrate passphrase -> repokey"""
        manifest_data = repository.get(Manifest.MANIFEST_ID)
        key_old = PassphraseKey.detect(repository, manifest_data)
        key_new = RepoKey(repository)
        key_new.target = repository
        key_new.repository_id = repository.id
        key_new.enc_key = key_old.enc_key
        key_new.enc_hmac_key = key_old.enc_hmac_key
        key_new.id_key = key_old.id_key
        key_new.chunk_seed = key_old.chunk_seed
        key_new.change_passphrase()  # option to change key protection passphrase, save
        return EXIT_SUCCESS

    @with_repository(fake='dry_run', exclusive=True)
    def do_create(self, args, repository, manifest=None, key=None):
        """Create new archive"""
        matcher = PatternMatcher(fallback=True)
        if args.excludes:
            matcher.add(args.excludes, False)

        def create_inner(archive, cache):
            # Add cache dir to inode_skip list
            skip_inodes = set()
            try:
                st = os.stat(get_cache_dir())
                skip_inodes.add((st.st_ino, st.st_dev))
            except OSError:
                pass
            # Add local repository dir to inode_skip list
            if not args.location.host:
                try:
                    st = os.stat(args.location.path)
                    skip_inodes.add((st.st_ino, st.st_dev))
                except OSError:
                    pass
            for path in args.paths:
                if path == '-':  # stdin
                    path = 'stdin'
                    if not dry_run:
                        try:
                            status = archive.process_stdin(path, cache)
                        except BackupOSError as e:
                            status = 'E'
                            self.print_warning('%s: %s', path, e)
                    else:
                        status = '-'
                    self.print_file_status(status, path)
                    continue
                path = os.path.normpath(path)
                if args.one_file_system:
                    try:
                        restrict_dev = os.lstat(path).st_dev
                    except OSError as e:
                        self.print_warning('%s: %s', path, e)
                        continue
                else:
                    restrict_dev = None
                self._process(archive, cache, matcher, args.exclude_caches, args.exclude_if_present,
                              args.keep_tag_files, skip_inodes, path, restrict_dev,
                              read_special=args.read_special, dry_run=dry_run)
            if not dry_run:
                archive.save(timestamp=args.timestamp)
                if args.progress:
                    archive.stats.show_progress(final=True)
                if args.stats:
                    archive.end = datetime.utcnow()
                    log_multi(DASHES,
                              str(archive),
                              DASHES,
                              str(archive.stats),
                              str(cache),
                              DASHES)

        self.output_filter = args.output_filter
        self.output_list = args.output_list
        self.ignore_inode = args.ignore_inode
        dry_run = args.dry_run
        t0 = datetime.utcnow()
        if not dry_run:
            compr_args = dict(buffer=COMPR_BUFFER)
            compr_args.update(args.compression)
            key.compressor = Compressor(**compr_args)
            with Cache(repository, key, manifest, do_files=args.cache_files, lock_wait=self.lock_wait) as cache:
                archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
                                  create=True, checkpoint_interval=args.checkpoint_interval,
                                  numeric_owner=args.numeric_owner, progress=args.progress,
                                  chunker_params=args.chunker_params, start=t0)
                create_inner(archive, cache)
        else:
            create_inner(None, None)
        return self.exit_code
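
    # Example invocation (illustrative; option names as defined in build_parser below):
    #   borg create --stats -C lz4 /path/to/repo::{hostname}-{now} ~/src
    # archives ~/src into a new archive whose name uses the placeholders described
    # under "borg help placeholders".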

    def _process(self, archive, cache, matcher, exclude_caches, exclude_if_present,
                 keep_tag_files, skip_inodes, path, restrict_dev,
                 read_special=False, dry_run=False):
        if not matcher.match(path):
            return
        try:
            st = os.lstat(path)
        except OSError as e:
            self.print_warning('%s: %s', path, e)
            return
        if (st.st_ino, st.st_dev) in skip_inodes:
            return
        # Entering a new filesystem?
        if restrict_dev is not None and st.st_dev != restrict_dev:
            return
        status = None
        # Ignore if nodump flag is set
        if has_lchflags and (st.st_flags & stat.UF_NODUMP):
            return
        if stat.S_ISREG(st.st_mode):
            if not dry_run:
                try:
                    status = archive.process_file(path, st, cache, self.ignore_inode)
                except BackupOSError as e:
                    status = 'E'
                    self.print_warning('%s: %s', path, e)
        elif stat.S_ISDIR(st.st_mode):
            tag_paths = dir_is_tagged(path, exclude_caches, exclude_if_present)
            if tag_paths:
                if keep_tag_files and not dry_run:
                    archive.process_dir(path, st)
                    for tag_path in tag_paths:
                        self._process(archive, cache, matcher, exclude_caches, exclude_if_present,
                                      keep_tag_files, skip_inodes, tag_path, restrict_dev,
                                      read_special=read_special, dry_run=dry_run)
                return
            if not dry_run:
                status = archive.process_dir(path, st)
            try:
                entries = os.listdir(path)
            except OSError as e:
                status = 'E'
                self.print_warning('%s: %s', path, e)
            else:
                for filename in sorted(entries):
                    entry_path = os.path.normpath(os.path.join(path, filename))
                    self._process(archive, cache, matcher, exclude_caches, exclude_if_present,
                                  keep_tag_files, skip_inodes, entry_path, restrict_dev,
                                  read_special=read_special, dry_run=dry_run)
        elif stat.S_ISLNK(st.st_mode):
            if not dry_run:
                if not read_special:
                    status = archive.process_symlink(path, st)
                else:
                    st_target = os.stat(path)
                    if is_special(st_target.st_mode):
                        status = archive.process_file(path, st_target, cache)
                    else:
                        status = archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            if not dry_run:
                if not read_special:
                    status = archive.process_fifo(path, st)
                else:
                    status = archive.process_file(path, st, cache)
        elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
            if not dry_run:
                if not read_special:
                    status = archive.process_dev(path, st)
                else:
                    status = archive.process_file(path, st, cache)
        elif stat.S_ISSOCK(st.st_mode):
            # Ignore unix sockets
            return
        elif stat.S_ISDOOR(st.st_mode):
            # Ignore Solaris doors
            return
        elif stat.S_ISPORT(st.st_mode):
            # Ignore Solaris event ports
            return
        else:
            self.print_warning('Unknown file type: %s', path)
            return
        # Status output
        if status is None:
            if not dry_run:
                status = '?'  # need to add a status code somewhere
            else:
                status = '-'  # dry run, item was not backed up
        self.print_file_status(status, path)

    @with_repository()
    @with_archive
    def do_extract(self, args, repository, manifest, key, archive):
        """Extract archive contents"""
        # be restrictive when restoring files, restore permissions later
        if sys.getfilesystemencoding() == 'ascii':
            logger.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
            if sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd', 'darwin', )):
                logger.warning('Hint: You likely need to fix your locale setup. E.g. install locales and use: LANG=en_US.UTF-8')
        matcher = PatternMatcher()
        if args.excludes:
            matcher.add(args.excludes, False)
        include_patterns = []
        if args.paths:
            include_patterns.extend(parse_pattern(i, PathPrefixPattern) for i in args.paths)
            matcher.add(include_patterns, True)
        matcher.fallback = not include_patterns
        output_list = args.output_list
        dry_run = args.dry_run
        stdout = args.stdout
        sparse = args.sparse
        strip_components = args.strip_components
        dirs = []
        for item in archive.iter_items(lambda item: matcher.match(item[b'path']), preload=True):
            orig_path = item[b'path']
            if strip_components:
                item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
                if not item[b'path']:
                    continue
            if not args.dry_run:
                while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                    dir_item = dirs.pop(-1)
                    try:
                        archive.extract_item(dir_item, stdout=stdout)
                    except BackupOSError as e:
                        self.print_warning('%s: %s', remove_surrogates(dir_item[b'path']), e)
            if output_list:
                logger.info(remove_surrogates(orig_path))
            try:
                if dry_run:
                    archive.extract_item(item, dry_run=True)
                else:
                    if stat.S_ISDIR(item[b'mode']):
                        dirs.append(item)
                        archive.extract_item(item, restore_attrs=False)
                    else:
                        archive.extract_item(item, stdout=stdout, sparse=sparse)
            except BackupOSError as e:
                self.print_warning('%s: %s', remove_surrogates(orig_path), e)
        if not args.dry_run:
            while dirs:
                dir_item = dirs.pop(-1)
                try:
                    archive.extract_item(dir_item)
                except BackupOSError as e:
                    self.print_warning('%s: %s', remove_surrogates(dir_item[b'path']), e)
        for pattern in include_patterns:
            if pattern.match_count == 0:
                self.print_warning("Include pattern '%s' never matched.", pattern)
        return self.exit_code
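
    # Example invocation (illustrative): restore a subtree from an archive while
    # dropping the leading path element:
    #   borg extract --strip-components 1 /path/to/repo::my-archive home/user/src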

    @with_repository(exclusive=True, cache=True)
    @with_archive
    def do_rename(self, args, repository, manifest, key, cache, archive):
        """Rename an existing archive"""
        archive.rename(args.name)
        manifest.write()
        repository.commit()
        cache.commit()
        return self.exit_code

    @with_repository(exclusive=True, manifest=False)
    def do_delete(self, args, repository):
        """Delete an existing repository or archive"""
        if args.location.archive:
            manifest, key = Manifest.load(repository)
            with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
                archive = Archive(repository, key, manifest, args.location.archive, cache=cache)
                stats = Statistics()
                archive.delete(stats, progress=args.progress, forced=args.forced)
                manifest.write()
                repository.commit(save_space=args.save_space)
                cache.commit()
                logger.info("Archive deleted.")
                if args.stats:
                    log_multi(DASHES,
                              stats.summary.format(label='Deleted data:', stats=stats),
                              str(cache),
                              DASHES)
        else:
            if not args.cache_only:
                msg = []
                try:
                    manifest, key = Manifest.load(repository)
                except NoManifestError:
                    msg.append("You requested to completely DELETE the repository *including* all archives it may contain.")
                    msg.append("This repository seems to have no manifest, so we can't tell anything about its contents.")
                else:
                    msg.append("You requested to completely DELETE the repository *including* all archives it contains:")
                    for archive_info in manifest.list_archive_infos(sort_by='ts'):
                        msg.append(format_archive(archive_info))
                msg.append("Type 'YES' if you understand this and want to continue: ")
                msg = '\n'.join(msg)
                if not yes(msg, false_msg="Aborting.", truish=('YES', ),
                           env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
                    self.exit_code = EXIT_ERROR
                    return self.exit_code
                repository.destroy()
                logger.info("Repository deleted.")
            Cache.destroy(repository)
            logger.info("Cache deleted.")
        return self.exit_code

    @with_repository()
    def do_mount(self, args, repository, manifest, key):
        """Mount archive or an entire repository as a FUSE filesystem"""
        try:
            from .fuse import FuseOperations
        except ImportError as e:
            self.print_error('Loading fuse support failed [ImportError: %s]' % str(e))
            return self.exit_code
        if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
            self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
            return self.exit_code
        with cache_if_remote(repository) as cached_repo:
            if args.location.archive:
                archive = Archive(repository, key, manifest, args.location.archive)
            else:
                archive = None
            operations = FuseOperations(key, repository, manifest, archive, cached_repo)
            logger.info("Mounting filesystem")
            try:
                operations.mount(args.mountpoint, args.options, args.foreground)
            except RuntimeError:
                # Relevant error message already printed to stderr by fuse
                self.exit_code = EXIT_ERROR
        return self.exit_code
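
    # Example invocation (illustrative; unmounting uses the standard FUSE tooling):
    #   borg mount /path/to/repo::my-archive /mnt/borg
    #   fusermount -u /mnt/borg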

    @with_repository()
    def do_list(self, args, repository, manifest, key):
        """List archive or repository contents"""
        if args.location.archive:
            archive = Archive(repository, key, manifest, args.location.archive)
            """use_user_format flag is used to speed up default listing.
            When user issues format options, listing is a bit slower, but more keys are available and
            precalculated.
            """
            use_user_format = args.listformat is not None
            if use_user_format:
                list_format = args.listformat
            elif args.short:
                list_format = "{path}{LF}"
            else:
                list_format = "{mode} {user:6} {group:6} {size:8d} {isomtime} {path}{extra}{LF}"
            for item in archive.iter_items():
                mode = stat.filemode(item[b'mode'])
                type = mode[0]
                size = 0
                if type == '-':
                    try:
                        size = sum(size for _, size, _ in item[b'chunks'])
                    except KeyError:
                        pass
                mtime = safe_timestamp(item[b'mtime'])
                if use_user_format:
                    atime = safe_timestamp(item.get(b'atime') or item[b'mtime'])
                    ctime = safe_timestamp(item.get(b'ctime') or item[b'mtime'])
                if b'source' in item:
                    source = item[b'source']
                    if type == 'l':
                        extra = ' -> %s' % item[b'source']
                    else:
                        mode = 'h' + mode[1:]
                        extra = ' link to %s' % item[b'source']
                else:
                    extra = ''
                    source = ''
                item_data = {
                    'mode': mode,
                    'user': item[b'user'] or item[b'uid'],
                    'group': item[b'group'] or item[b'gid'],
                    'size': size,
                    'isomtime': format_time(mtime),
                    'path': remove_surrogates(item[b'path']),
                    'extra': extra,
                    'LF': '\n',
                }
                if use_user_format:
                    item_data_advanced = {
                        'bmode': item[b'mode'],
                        'type': type,
                        'source': source,
                        'linktarget': source,
                        'uid': item[b'uid'],
                        'gid': item[b'gid'],
                        'mtime': mtime,
                        'isoctime': format_time(ctime),
                        'ctime': ctime,
                        'isoatime': format_time(atime),
                        'atime': atime,
                        'archivename': archive.name,
                        'SPACE': ' ',
                        'TAB': '\t',
                        'CR': '\r',
                        'NEWLINE': os.linesep,
                    }
                    item_data.update(item_data_advanced)
                item_data['formatkeys'] = list(item_data.keys())
                print(format_line(list_format, item_data), end='')
        else:
            for archive_info in manifest.list_archive_infos(sort_by='ts'):
                if args.prefix and not archive_info.name.startswith(args.prefix):
                    continue
                if args.short:
                    print(archive_info.name)
                else:
                    print(format_archive(archive_info))
        return self.exit_code
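
    # Illustrative example, assuming the corresponding --list-format option is
    # defined in build_parser below; the available keys are the item_data keys
    # built above:
    #   borg list --list-format '{mode} {size:8d} {path}{NEWLINE}' /path/to/repo::my-archive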

    @with_repository(cache=True)
    @with_archive
    def do_info(self, args, repository, manifest, key, archive, cache):
        """Show archive details such as disk space used"""
        stats = archive.calc_stats(cache)
        print('Name:', archive.name)
        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Hostname:', archive.metadata[b'hostname'])
        print('Username:', archive.metadata[b'username'])
        print('Time (start): %s' % format_time(to_localtime(archive.ts)))
        print('Time (end): %s' % format_time(to_localtime(archive.ts_end)))
        print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
        print('Number of files: %d' % stats.nfiles)
        print()
        print(str(stats))
        print(str(cache))
        return self.exit_code

    @with_repository(exclusive=True)
    def do_prune(self, args, repository, manifest, key):
        """Prune repository archives according to specified rules"""
        if not any((args.hourly, args.daily,
                    args.weekly, args.monthly, args.yearly, args.within)):
            self.print_error('At least one of the "keep-within", "keep-last", '
                             '"keep-hourly", "keep-daily", '
                             '"keep-weekly", "keep-monthly" or "keep-yearly" settings must be specified.')
            return self.exit_code
        archives = manifest.list_archive_infos(sort_by='ts', reverse=True)  # just an ArchiveInfo list
        if args.prefix:
            archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
        # ignore all checkpoint archives to avoid keeping one (which is an incomplete backup)
        # that is newer than a successfully completed backup - and killing the successful backup.
        is_checkpoint = re.compile(r'\.checkpoint(\.\d+)?$').search
        archives = [archive for archive in archives if not is_checkpoint(archive.name)]
        keep = []
        if args.within:
            keep += prune_within(archives, args.within)
        if args.hourly:
            keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
        if args.daily:
            keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
        if args.weekly:
            keep += prune_split(archives, '%G-%V', args.weekly, keep)
        if args.monthly:
            keep += prune_split(archives, '%Y-%m', args.monthly, keep)
        if args.yearly:
            keep += prune_split(archives, '%Y', args.yearly, keep)
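        # Note: each prune_split() call above receives the archives already kept by
        # the preceding rules, so an archive is counted against at most one rule and
        # the keep rules stack.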
        keep.sort(key=attrgetter('ts'), reverse=True)
        to_delete = [a for a in archives if a not in keep]
        stats = Statistics()
        with Cache(repository, key, manifest, do_files=args.cache_files, lock_wait=self.lock_wait) as cache:
            for archive in keep:
                if args.output_list:
                    logger.info('Keeping archive: %s' % format_archive(archive))
            for archive in to_delete:
                if args.dry_run:
                    if args.output_list:
                        logger.info('Would prune: %s' % format_archive(archive))
                else:
                    if args.output_list:
                        logger.info('Pruning archive: %s' % format_archive(archive))
                    Archive(repository, key, manifest, archive.name, cache).delete(stats, forced=args.forced)
            if to_delete and not args.dry_run:
                manifest.write()
                repository.commit(save_space=args.save_space)
                cache.commit()
            if args.stats:
                log_multi(DASHES,
                          stats.summary.format(label='Deleted data:', stats=stats),
                          str(cache),
                          DASHES)
        return self.exit_code
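
    # Example invocation (illustrative): keep 7 daily and 4 weekly archives whose
    # names start with the current hostname:
    #   borg prune --keep-daily 7 --keep-weekly 4 --prefix '{hostname}-' /path/to/repo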

    def do_upgrade(self, args):
        """upgrade a repository from a previous version"""
        # mainly for upgrades from Attic repositories,
        # but also supports borg 0.xx -> 1.0 upgrade.
        repo = AtticRepositoryUpgrader(args.location.path, create=False)
        try:
            repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
        except NotImplementedError as e:
            print("warning: %s" % e)
        repo = BorgRepositoryUpgrader(args.location.path, create=False)
        try:
            repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
        except NotImplementedError as e:
            print("warning: %s" % e)
        return self.exit_code

    @with_repository()
    def do_debug_dump_archive_items(self, args, repository, manifest, key):
        """dump (decrypted, decompressed) archive items metadata (not: data)"""
        archive = Archive(repository, key, manifest, args.location.archive)
        for i, item_id in enumerate(archive.metadata[b'items']):
            data = key.decrypt(item_id, repository.get(item_id))
            filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
            print('Dumping', filename)
            with open(filename, 'wb') as fd:
                fd.write(data)
        print('Done.')
        return EXIT_SUCCESS

    @with_repository()
    def do_debug_dump_repo_objs(self, args, repository, manifest, key):
        """dump (decrypted, decompressed) repo objects"""
        marker = None
        i = 0
        while True:
            result = repository.list(limit=10000, marker=marker)
            if not result:
                break
            marker = result[-1]
            for id in result:
                cdata = repository.get(id)
                give_id = id if id != Manifest.MANIFEST_ID else None
                data = key.decrypt(give_id, cdata)
                filename = '%06d_%s.obj' % (i, hexlify(id).decode('ascii'))
                print('Dumping', filename)
                with open(filename, 'wb') as fd:
                    fd.write(data)
                i += 1
        print('Done.')
        return EXIT_SUCCESS

    @with_repository(manifest=False)
    def do_debug_get_obj(self, args, repository):
        """get object contents from the repository and write it into file"""
        hex_id = args.id
        try:
            id = unhexlify(hex_id)
        except ValueError:
            print("object id %s is invalid." % hex_id)
        else:
            try:
                data = repository.get(id)
            except repository.ObjectNotFound:
                print("object %s not found." % hex_id)
            else:
                with open(args.path, "wb") as f:
                    f.write(data)
                print("object %s fetched." % hex_id)
        return EXIT_SUCCESS

    @with_repository(manifest=False, exclusive=True)
    def do_debug_put_obj(self, args, repository):
        """put file(s) contents into the repository"""
        for path in args.paths:
            with open(path, "rb") as f:
                data = f.read()
            h = sha256(data)  # XXX hardcoded
            repository.put(h.digest(), data)
            print("object %s put." % h.hexdigest())
        repository.commit()
        return EXIT_SUCCESS

    @with_repository(manifest=False, exclusive=True)
    def do_debug_delete_obj(self, args, repository):
        """delete the objects with the given IDs from the repo"""
        modified = False
        for hex_id in args.ids:
            try:
                id = unhexlify(hex_id)
            except ValueError:
                print("object id %s is invalid." % hex_id)
            else:
                try:
                    repository.delete(id)
                    modified = True
                    print("object %s deleted." % hex_id)
                except repository.ObjectNotFound:
                    print("object %s not found." % hex_id)
        if modified:
            repository.commit()
        print('Done.')
        return EXIT_SUCCESS

    @with_repository(lock=False, manifest=False)
    def do_break_lock(self, args, repository):
        """Break the repository lock (e.g. in case it was left by a dead borg)."""
        repository.break_lock()
        Cache.break_lock(repository)
        return self.exit_code

    helptext = collections.OrderedDict()
    helptext['patterns'] = textwrap.dedent('''
        Exclusion patterns support four separate styles, fnmatch, shell, regular
        expressions and path prefixes. By default, fnmatch is used. If followed
        by a colon (':') the first two characters of a pattern are used as a
        style selector. Explicit style selection is necessary when a
        non-default style is desired or when the desired pattern starts with
        two alphanumeric characters followed by a colon (i.e. `aa:something/*`).

        `Fnmatch <https://docs.python.org/3/library/fnmatch.html>`_, selector `fm:`
            This is the default style. These patterns use a variant of shell
            pattern syntax, with '*' matching any number of characters, '?'
            matching any single character, '[...]' matching any single
            character specified, including ranges, and '[!...]' matching any
            character not specified. For the purpose of these patterns, the
            path separator ('\\' for Windows and '/' on other systems) is not
            treated specially. Wrap meta-characters in brackets for a literal
            match (i.e. `[?]` to match the literal character `?`). For a path
            to match a pattern, it must completely match from start to end, or
            must match from the start to just before a path separator. Except
            for the root path, paths will never end in the path separator when
            matching is attempted. Thus, if a given pattern ends in a path
            separator, a '*' is appended before matching is attempted.

        Shell-style patterns, selector `sh:`
            Like fnmatch patterns these are similar to shell patterns. The difference
            is that the pattern may include `**/` for matching zero or more directory
            levels, `*` for matching zero or more arbitrary characters with the
            exception of any path separator.

        Regular expressions, selector `re:`
            Regular expressions similar to those found in Perl are supported. Unlike
            shell patterns regular expressions are not required to match the complete
            path and any substring match is sufficient. It is strongly recommended to
            anchor patterns to the start ('^'), to the end ('$') or both. Path
            separators ('\\' for Windows and '/' on other systems) in paths are
            always normalized to a forward slash ('/') before applying a pattern. The
            regular expression syntax is described in the `Python documentation for
            the re module <https://docs.python.org/3/library/re.html>`_.

        Prefix path, selector `pp:`
            This pattern style is useful to match whole sub-directories. The pattern
            `pp:/data/bar` matches `/data/bar` and everything therein.

        Exclusions can be passed via the command line option `--exclude`. When used
        from within a shell the patterns should be quoted to protect them from
        expansion.

        The `--exclude-from` option permits loading exclusion patterns from a text
        file with one pattern per line. Lines empty or starting with the number sign
        ('#') after removing whitespace on both ends are ignored. The optional style
        selector prefix is also supported for patterns loaded from a file. Due to
        whitespace removal paths with whitespace at the beginning or end can only be
        excluded using regular expressions.

        Examples::

            # Exclude '/home/user/file.o' but not '/home/user/file.odt':
            $ borg create -e '*.o' backup /

            # Exclude '/home/user/junk' and '/home/user/subdir/junk' but
            # not '/home/user/importantjunk' or '/etc/junk':
            $ borg create -e '/home/*/junk' backup /

            # Exclude the contents of '/home/user/cache' but not the directory itself:
            $ borg create -e /home/user/cache/ backup /

            # The file '/home/user/cache/important' is *not* backed up:
            $ borg create -e /home/user/cache/ backup / /home/user/cache/important

            # The contents of directories in '/home' are not backed up when their name
            # ends in '.tmp'
            $ borg create --exclude 're:^/home/[^/]+\.tmp/' backup /

            # Load exclusions from file
            $ cat >exclude.txt <<EOF
            # Comment line
            /home/*/junk
            *.tmp
            fm:aa:something/*
            re:^/home/[^/]\.tmp/
            sh:/home/*/.thumbnails
            EOF
            $ borg create --exclude-from exclude.txt backup /\n\n''')
    helptext['placeholders'] = textwrap.dedent('''
        Repository (or Archive) URLs, --prefix and --remote-path values support these
        placeholders:

        {hostname}
            The (short) hostname of the machine.

        {fqdn}
            The full name of the machine.

        {now}
            The current local date and time.

        {utcnow}
            The current UTC date and time.

        {user}
            The user name (or UID, if no name is available) of the user running borg.

        {pid}
            The current process ID.

        {borgversion}
            The version of borg.

        Examples::

            borg create /path/to/repo::{hostname}-{user}-{utcnow} ...
            borg create /path/to/repo::{hostname}-{now:%Y-%m-%d_%H:%M:%S} ...
            borg prune --prefix '{hostname}-' ...\n\n''')

    def do_help(self, parser, commands, args):
        if not args.topic:
            parser.print_help()
        elif args.topic in self.helptext:
            print(self.helptext[args.topic])
        elif args.topic in commands:
            if args.epilog_only:
                print(commands[args.topic].epilog)
            elif args.usage_only:
                commands[args.topic].epilog = None
                commands[args.topic].print_help()
            else:
                commands[args.topic].print_help()
        else:
            parser.error('No help available on %s' % (args.topic,))
        return self.exit_code

    def preprocess_args(self, args):
        deprecations = [
            # ('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
        ]
        for i, arg in enumerate(args[:]):
            for old_name, new_name, warning in deprecations:
                if arg.startswith(old_name):
                    args[i] = arg.replace(old_name, new_name)
                    print(warning)
        return args

    def build_parser(self, args=None, prog=None):
        common_parser = argparse.ArgumentParser(add_help=False, prog=prog)
        common_parser.add_argument('--critical', dest='log_level',
                                   action='store_const', const='critical', default='warning',
                                   help='work on log level CRITICAL')
        common_parser.add_argument('--error', dest='log_level',
                                   action='store_const', const='error', default='warning',
                                   help='work on log level ERROR')
        common_parser.add_argument('--warning', dest='log_level',
                                   action='store_const', const='warning', default='warning',
                                   help='work on log level WARNING (default)')
        common_parser.add_argument('--info', '-v', '--verbose', dest='log_level',
                                   action='store_const', const='info', default='warning',
                                   help='work on log level INFO')
        common_parser.add_argument('--debug', dest='log_level',
                                   action='store_const', const='debug', default='warning',
                                   help='work on log level DEBUG')
        common_parser.add_argument('--lock-wait', dest='lock_wait', type=int, metavar='N', default=1,
                                   help='wait for the lock, but max. N seconds (default: %(default)d).')
        common_parser.add_argument('--show-rc', dest='show_rc', action='store_true', default=False,
                                   help='show/log the return code (rc)')
        common_parser.add_argument('--no-files-cache', dest='cache_files', action='store_false',
                                   help='do not load/update the file metadata cache used to detect unchanged files')
        common_parser.add_argument('--umask', dest='umask', type=lambda s: int(s, 8), default=UMASK_DEFAULT, metavar='M',
                                   help='set umask to M (local and remote, default: %(default)04o)')
        common_parser.add_argument('--remote-path', dest='remote_path', metavar='PATH',
                                   help='set remote path to executable (default: "borg")')

        parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
        parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,
                            help='show version number and exit')
        subparsers = parser.add_subparsers(title='required arguments', metavar='<command>')

        serve_epilog = textwrap.dedent("""
        This command starts a repository server process. This command is usually not used manually.
        """)
        subparser = subparsers.add_parser('serve', parents=[common_parser],
                                          description=self.do_serve.__doc__, epilog=serve_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='start repository server process')
        subparser.set_defaults(func=self.do_serve)
        subparser.add_argument('--restrict-to-path', dest='restrict_to_paths', action='append',
                               metavar='PATH', help='restrict repository access to PATH')
        subparser.add_argument('--append-only', dest='append_only', action='store_true',
                               help='only allow appending to repository segment files')

        init_epilog = textwrap.dedent("""
        This command initializes an empty repository. A repository is a filesystem
        directory containing the deduplicated data from zero or more archives.
        Encryption can be enabled at repository init time.
        """)
        subparser = subparsers.add_parser('init', parents=[common_parser],
                                          description=self.do_init.__doc__, epilog=init_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='initialize empty repository')
        subparser.set_defaults(func=self.do_init)
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository to create')
        subparser.add_argument('-e', '--encryption', dest='encryption',
                               choices=('none', 'keyfile', 'repokey'), default='repokey',
                               help='select encryption key mode (default: "%(default)s")')
        subparser.add_argument('-a', '--append-only', dest='append_only', action='store_true',
                               help='create an append-only mode repository')

        check_epilog = textwrap.dedent("""
        The check command verifies the consistency of a repository and the corresponding archives.

        First, the underlying repository data files are checked:

        - For all segments the segment magic (header) is checked
        - For all objects stored in the segments, all metadata (e.g. crc and size) and
          all data is read. The read data is checked by size and CRC. Bit rot and other
          types of accidental damage can be detected this way.
        - If we are in repair mode and an integrity error is detected for a segment,
          we try to recover as many objects from the segment as possible.
        - In repair mode, it makes sure that the index is consistent with the data
          stored in the segments.
        - If you use a remote repo server via ssh:, the repo check is executed on the
          repo server without causing significant network traffic.
        - The repository check can be skipped using the --archives-only option.

        Second, the consistency and correctness of the archive metadata is verified:

        - Is the repo manifest present? If not, it is rebuilt from archive metadata
          chunks (this requires reading and decrypting of all metadata and data).
        - Check if archive metadata chunk is present. If not, remove archive from
          manifest.
        - For all files (items) in the archive, for all chunks referenced by these
          files, check if chunk is present.
          If a chunk is not present and we are in repair mode, replace it with a same-size
          replacement chunk of zeros.
          If a previously lost chunk reappears (e.g. via a later backup) and we are in
          repair mode, the all-zero replacement chunk will be replaced by the correct chunk.
          This requires reading of archive and file metadata, but not data.
        - If we are in repair mode and we checked all the archives: delete orphaned
          chunks from the repo.
        - If you use a remote repo server via ssh:, the archive check is executed on
          the client machine (because if encryption is enabled, the checks will require
          decryption and this is always done client-side, because key access will be
          required).
        - The archive checks can be time consuming, they can be skipped using the
          --repository-only option.
        """)
  922. subparser = subparsers.add_parser('check', parents=[common_parser],
  923. description=self.do_check.__doc__,
  924. epilog=check_epilog,
  925. formatter_class=argparse.RawDescriptionHelpFormatter,
  926. help='verify repository')
  927. subparser.set_defaults(func=self.do_check)
  928. subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
  929. type=location_validator(),
  930. help='repository or archive to check consistency of')
  931. subparser.add_argument('--repository-only', dest='repo_only', action='store_true',
  932. default=False,
  933. help='only perform repository checks')
  934. subparser.add_argument('--archives-only', dest='archives_only', action='store_true',
  935. default=False,
  936. help='only perform archives checks')
  937. subparser.add_argument('--repair', dest='repair', action='store_true',
  938. default=False,
  939. help='attempt to repair any inconsistencies found')
  940. subparser.add_argument('--save-space', dest='save_space', action='store_true',
  941. default=False,
  942. help='work slower, but using less space')
  943. subparser.add_argument('--last', dest='last',
  944. type=int, default=None, metavar='N',
  945. help='only check last N archives (Default: all)')
  946. subparser.add_argument('-P', '--prefix', dest='prefix', type=PrefixSpec,
  947. help='only consider archive names starting with this prefix')
  948. change_passphrase_epilog = textwrap.dedent("""
  949. The key files used for repository encryption are optionally passphrase
  950. protected. This command can be used to change this passphrase.
  951. """)
  952. subparser = subparsers.add_parser('change-passphrase', parents=[common_parser],
  953. description=self.do_change_passphrase.__doc__,
  954. epilog=change_passphrase_epilog,
  955. formatter_class=argparse.RawDescriptionHelpFormatter,
  956. help='change repository passphrase')
  957. subparser.set_defaults(func=self.do_change_passphrase)
  958. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  959. type=location_validator(archive=False))
  960. migrate_to_repokey_epilog = textwrap.dedent("""
  961. This command migrates a repository from passphrase mode (not supported any
  962. more) to repokey mode.
  963. You will be first asked for the repository passphrase (to open it in passphrase
  964. mode). This is the same passphrase as you used to use for this repo before 1.0.
  965. It will then derive the different secrets from this passphrase.
  966. Then you will be asked for a new passphrase (twice, for safety). This
  967. passphrase will be used to protect the repokey (which contains these same
  968. secrets in encrypted form). You may use the same passphrase as you used to
  969. use, but you may also use a different one.
  970. After migrating to repokey mode, you can change the passphrase at any time.
  971. But please note: the secrets will always stay the same and they could always
  972. be derived from your (old) passphrase-mode passphrase.
  973. """)
  974. subparser = subparsers.add_parser('migrate-to-repokey', parents=[common_parser],
  975. description=self.do_migrate_to_repokey.__doc__,
  976. epilog=migrate_to_repokey_epilog,
  977. formatter_class=argparse.RawDescriptionHelpFormatter,
  978. help='migrate passphrase-mode repository to repokey')
  979. subparser.set_defaults(func=self.do_migrate_to_repokey)
  980. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  981. type=location_validator(archive=False))
  982. create_epilog = textwrap.dedent("""
  983. This command creates a backup archive containing all files found while recursively
  984. traversing all paths specified. The archive will consume almost no disk space for
  985. files or parts of files that have already been stored in other archives.
  986. The archive name needs to be unique. It must not end in '.checkpoint' or
  987. '.checkpoint.N' (with N being a number), because these names are used for
  988. checkpoints and treated in special ways.
  989. In the archive name, you may use the following format tags:
  990. {now}, {utcnow}, {fqdn}, {hostname}, {user}, {pid}, {borgversion}
  991. To speed up pulling backups over sshfs and similar network file systems which do
  992. not provide correct inode information the --ignore-inode flag can be used. This
  993. potentially decreases reliability of change detection, while avoiding always reading
  994. all files on these file systems.
  995. See the output of the "borg help patterns" command for more help on exclude patterns.
  996. See the output of the "borg help placeholders" command for more help on placeholders.
  997. """)
  998. subparser = subparsers.add_parser('create', parents=[common_parser],
  999. description=self.do_create.__doc__,
  1000. epilog=create_epilog,
  1001. formatter_class=argparse.RawDescriptionHelpFormatter,
  1002. help='create backup')
  1003. subparser.set_defaults(func=self.do_create)
  1004. subparser.add_argument('-s', '--stats', dest='stats',
  1005. action='store_true', default=False,
  1006. help='print statistics for the created archive')
  1007. subparser.add_argument('-p', '--progress', dest='progress',
  1008. action='store_true', default=False,
  1009. help="""show progress display while creating the archive, showing Original,
  1010. Compressed and Deduplicated sizes, followed by the Number of files seen
  1011. and the path being processed, default: %(default)s""")
  1012. subparser.add_argument('--list', dest='output_list',
  1013. action='store_true', default=False,
  1014. help='output verbose list of items (files, dirs, ...)')
  1015. subparser.add_argument('--filter', dest='output_filter', metavar='STATUSCHARS',
  1016. help='only display items with the given status characters')
  1017. subparser.add_argument('-e', '--exclude', dest='excludes',
  1018. type=parse_pattern, action='append',
  1019. metavar="PATTERN", help='exclude paths matching PATTERN')
  1020. subparser.add_argument('--exclude-from', dest='exclude_files',
  1021. type=argparse.FileType('r'), action='append',
  1022. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1023. subparser.add_argument('--exclude-caches', dest='exclude_caches',
  1024. action='store_true', default=False,
  1025. help='exclude directories that contain a CACHEDIR.TAG file (http://www.brynosaurus.com/cachedir/spec.html)')
  1026. subparser.add_argument('--exclude-if-present', dest='exclude_if_present',
  1027. metavar='FILENAME', action='append', type=str,
  1028. help='exclude directories that contain the specified file')
  1029. subparser.add_argument('--keep-tag-files', dest='keep_tag_files',
  1030. action='store_true', default=False,
  1031. help='keep tag files of excluded caches/directories')
  1032. subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
  1033. type=int, default=300, metavar='SECONDS',
  1034. help='write checkpoint every SECONDS seconds (Default: 300)')
  1035. subparser.add_argument('-x', '--one-file-system', dest='one_file_system',
  1036. action='store_true', default=False,
  1037. help='stay in same file system, do not cross mount points')
  1038. subparser.add_argument('--numeric-owner', dest='numeric_owner',
  1039. action='store_true', default=False,
  1040. help='only store numeric user and group identifiers')
  1041. subparser.add_argument('--timestamp', dest='timestamp',
  1042. type=timestamp, default=None,
  1043. metavar='yyyy-mm-ddThh:mm:ss',
  1044. help='manually specify the archive creation date/time (UTC). '
  1045. 'alternatively, give a reference file/directory.')
  1046. subparser.add_argument('--chunker-params', dest='chunker_params',
  1047. type=ChunkerParams, default=CHUNKER_PARAMS,
  1048. metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
  1049. help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
  1050. subparser.add_argument('--ignore-inode', dest='ignore_inode',
  1051. action='store_true', default=False,
  1052. help='ignore inode data in the file metadata cache used to detect unchanged files.')
        subparser.add_argument('-C', '--compression', dest='compression',
                               type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
                               help='select compression algorithm (and level): '
                                    'none == no compression (default), '
                                    'lz4 == lz4, '
                                    'zlib == zlib (default level 6), '
                                    'zlib,0 .. zlib,9 == zlib (with level 0..9), '
                                    'lzma == lzma (default level 6), '
                                    'lzma,0 .. lzma,9 == lzma (with level 0..9).')
        subparser.add_argument('--read-special', dest='read_special',
                               action='store_true', default=False,
                               help='open and read block and char device files as well as FIFOs as if they were '
                                    'regular files. Also follows symlinks pointing to these kinds of files.')
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               action='store_true', default=False,
                               help='do not create a backup archive')
        subparser.add_argument('location', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='name of archive to create (must also be a valid directory name)')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='paths to archive')

        extract_epilog = textwrap.dedent("""
        This command extracts the contents of an archive. By default the entire
        archive is extracted but a subset of files and directories can be selected
        by passing a list of ``PATHs`` as arguments. The file selection can further
        be restricted by using the ``--exclude`` option.

        See the output of the "borg help patterns" command for more help on exclude patterns.
        """)
        subparser = subparsers.add_parser('extract', parents=[common_parser],
                                          description=self.do_extract.__doc__,
                                          epilog=extract_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='extract archive contents')
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('--list', dest='output_list',
                               action='store_true', default=False,
                               help='output verbose list of items (files, dirs, ...)')
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               default=False, action='store_true',
                               help='do not actually change any files')
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=parse_pattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('--exclude-from', dest='exclude_files',
                               type=argparse.FileType('r'), action='append',
                               metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
        subparser.add_argument('--numeric-owner', dest='numeric_owner',
                               action='store_true', default=False,
                               help='only obey numeric user and group identifiers')
        subparser.add_argument('--strip-components', dest='strip_components',
                               type=int, default=0, metavar='NUMBER',
                               help='Remove the specified number of leading path elements. Pathnames with fewer elements will be silently skipped.')
        subparser.add_argument('--stdout', dest='stdout',
                               action='store_true', default=False,
                               help='write all extracted data to stdout')
        subparser.add_argument('--sparse', dest='sparse',
                               action='store_true', default=False,
                               help='create holes in output sparse file from all-zero chunks')
        subparser.add_argument('location', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to extract')
        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
                               help='paths to extract; patterns are supported')

        rename_epilog = textwrap.dedent("""
        This command renames an archive in the repository.
        """)
        subparser = subparsers.add_parser('rename', parents=[common_parser],
                                          description=self.do_rename.__doc__,
                                          epilog=rename_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='rename archive')
        subparser.set_defaults(func=self.do_rename)
        subparser.add_argument('location', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to rename')
        subparser.add_argument('name', metavar='NEWNAME',
                               type=archivename_validator(),
                               help='the new archive name to use')

        delete_epilog = textwrap.dedent("""
        This command deletes an archive from the repository or the complete repository.
        Disk space is reclaimed accordingly. If you delete the complete repository, the
        local cache for it (if any) is also deleted.
        """)
        subparser = subparsers.add_parser('delete', parents=[common_parser],
                                          description=self.do_delete.__doc__,
                                          epilog=delete_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='delete archive')
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('-p', '--progress', dest='progress',
                               action='store_true', default=False,
                               help="""show progress display while deleting a single archive""")
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the deleted archive')
        subparser.add_argument('-c', '--cache-only', dest='cache_only',
                               action='store_true', default=False,
                               help='delete only the local cache for the given repository')
        subparser.add_argument('--force', dest='forced',
                               action='store_true', default=False,
                               help='force deletion of corrupted archives')
        subparser.add_argument('--save-space', dest='save_space', action='store_true',
                               default=False,
                               help='work slower, but using less space')
        subparser.add_argument('location', metavar='TARGET', nargs='?', default='',
                               type=location_validator(),
                               help='archive or repository to delete')

        list_epilog = textwrap.dedent("""
        This command lists the contents of a repository or an archive.
        """)
        subparser = subparsers.add_parser('list', parents=[common_parser],
                                          description=self.do_list.__doc__,
                                          epilog=list_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='list archive or repository contents')
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('--short', dest='short',
                               action='store_true', default=False,
                               help='only print file/directory names, nothing else')
        subparser.add_argument('--list-format', dest='listformat', type=str,
                               help="""specify format for archive file listing
                               (default: "{mode} {user:6} {group:6} {size:8d} {isomtime} {path}{extra}{NEWLINE}")
                               Special "{formatkeys}" exists to list available keys""")
        subparser.add_argument('-P', '--prefix', dest='prefix', type=PrefixSpec,
                               help='only consider archive names starting with this prefix')
        subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
                               type=location_validator(),
                               help='repository/archive to list contents of')

        mount_epilog = textwrap.dedent("""
        This command mounts an archive as a FUSE filesystem. This can be useful for
        browsing an archive or restoring individual files. Unless the ``--foreground``
        option is given the command will run in the background until the filesystem
        is ``umounted``.

        The BORG_MOUNT_DATA_CACHE_ENTRIES environment variable is meant for advanced users
        to tweak the performance. It sets the number of cached data chunks; additional
        memory usage can be up to ~8 MiB times this number. The default is the number
        of CPU cores.

        For mount options, see the fuse(8) manual page. Additional mount options
        supported by borg:

        - allow_damaged_files: by default damaged files (where missing chunks were
          replaced with runs of zeros by borg check --repair) are not readable and
          return EIO (I/O error). Set this option to read such files.
        """)
        subparser = subparsers.add_parser('mount', parents=[common_parser],
                                          description=self.do_mount.__doc__,
                                          epilog=mount_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='mount repository')
        subparser.set_defaults(func=self.do_mount)
        subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', type=location_validator(),
                               help='repository/archive to mount')
        subparser.add_argument('mountpoint', metavar='MOUNTPOINT', type=str,
                               help='where to mount filesystem')
        subparser.add_argument('-f', '--foreground', dest='foreground',
                               action='store_true', default=False,
                               help='stay in foreground, do not daemonize')
        subparser.add_argument('-o', dest='options', type=str,
                               help='Extra mount options')

        info_epilog = textwrap.dedent("""
        This command displays some detailed information about the specified archive.
        """)
        subparser = subparsers.add_parser('info', parents=[common_parser],
                                          description=self.do_info.__doc__,
                                          epilog=info_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='show archive information')
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('location', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to display information about')

        break_lock_epilog = textwrap.dedent("""
        This command breaks the repository and cache locks.
        Please use carefully and only while no borg process (on any machine) is
        trying to access the Cache or the Repository.
        """)
        subparser = subparsers.add_parser('break-lock', parents=[common_parser],
                                          description=self.do_break_lock.__doc__,
                                          epilog=break_lock_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='break repository and cache locks')
        subparser.set_defaults(func=self.do_break_lock)
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository for which to break the locks')

        prune_epilog = textwrap.dedent("""
        The prune command prunes a repository by deleting all archives not matching
        any of the specified retention options. This command is normally used by
        automated backup scripts wanting to keep a certain number of historic backups.

        As an example, "-d 7" means to keep the latest backup on each day, up to 7
        most recent days with backups (days without backups do not count).
        The rules are applied from hourly to yearly, and backups selected by previous
        rules do not count towards those of later rules. The time that each backup
        starts is used for pruning purposes. Dates and times are interpreted in
        the local timezone, and weeks go from Monday to Sunday. Specifying a
        negative number of archives to keep means that there is no limit.

        The "--keep-within" option takes an argument of the form "<int><char>",
        where char is "H", "d", "w", "m", "y". For example, "--keep-within 2d" means
        to keep all archives that were created within the past 48 hours.
        "1m" is taken to mean "31d". The archives kept with this option do not
        count towards the totals specified by any other options.

        If a prefix is set with -P, then only archives that start with the prefix are
        considered for deletion and only those archives count towards the totals
        specified by the rules.
        Otherwise, *all* archives in the repository are candidates for deletion!
        """)
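        # Example usage of the retention options defined below (REPO is a placeholder):
        #   borg prune --list -H 24 -d 7 -w 4 -m 6 REPO
        # keeps the last 24 hourly, 7 daily, 4 weekly and 6 monthly archives and lists
        # which archives are kept or pruned.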
        subparser = subparsers.add_parser('prune', parents=[common_parser],
                                          description=self.do_prune.__doc__,
                                          epilog=prune_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='prune archives')
        subparser.set_defaults(func=self.do_prune)
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               default=False, action='store_true',
                               help='do not change repository')
        subparser.add_argument('--force', dest='forced',
                               action='store_true', default=False,
                               help='force pruning of corrupted archives')
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the deleted archive')
        subparser.add_argument('--list', dest='output_list',
                               action='store_true', default=False,
                               help='output verbose list of archives it keeps/prunes')
        subparser.add_argument('--keep-within', dest='within', type=str, metavar='WITHIN',
                               help='keep all archives within this time interval')
        subparser.add_argument('-H', '--keep-hourly', dest='hourly', type=int, default=0,
                               help='number of hourly archives to keep')
        subparser.add_argument('-d', '--keep-daily', dest='daily', type=int, default=0,
                               help='number of daily archives to keep')
        subparser.add_argument('-w', '--keep-weekly', dest='weekly', type=int, default=0,
                               help='number of weekly archives to keep')
        subparser.add_argument('-m', '--keep-monthly', dest='monthly', type=int, default=0,
                               help='number of monthly archives to keep')
        subparser.add_argument('-y', '--keep-yearly', dest='yearly', type=int, default=0,
                               help='number of yearly archives to keep')
        subparser.add_argument('-P', '--prefix', dest='prefix', type=PrefixSpec,
                               help='only consider archive names starting with this prefix')
        subparser.add_argument('--save-space', dest='save_space', action='store_true',
                               default=False,
                               help='work slower, but using less space')
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository to prune')

        upgrade_epilog = textwrap.dedent("""
        Upgrade an existing Borg repository.
        This currently supports converting an Attic repository to Borg and also
        helps with converting Borg 0.xx to 1.0.

        Currently, only LOCAL repositories can be upgraded (issue #465).

        It will change the magic strings in the repository's segments
        to match the new Borg magic strings. The keyfiles found in
        $ATTIC_KEYS_DIR or ~/.attic/keys/ will also be converted and
        copied to $BORG_KEYS_DIR or ~/.config/borg/keys.

        The cache files are converted, from $ATTIC_CACHE_DIR or
        ~/.cache/attic to $BORG_CACHE_DIR or ~/.cache/borg, but the
        cache layout between Borg and Attic changed, so it is possible
        the first backup after the conversion takes longer than expected
        due to the cache resync.

        Upgrade should be able to resume if interrupted, although it
        will still iterate over all segments. If you want to start
        from scratch, use `borg delete` over the copied repository to
        make sure the cache files are also removed:

            borg delete borg

        Unless ``--inplace`` is specified, the upgrade process first
        creates a backup copy of the repository, in
        REPOSITORY.upgrade-DATETIME, using hardlinks. This takes
        longer than in-place upgrades, but is much safer and gives
        progress information (as opposed to ``cp -al``). Once you are
        satisfied with the conversion, you can safely destroy the
        backup copy.

        WARNING: Running the upgrade in place will make the current
        copy unusable with older versions, with no way of going back
        to previous versions. This can PERMANENTLY DAMAGE YOUR
        REPOSITORY! Attic CAN NOT READ BORG REPOSITORIES, as the
        magic strings have changed. You have been warned.""")
        subparser = subparsers.add_parser('upgrade', parents=[common_parser],
                                          description=self.do_upgrade.__doc__,
                                          epilog=upgrade_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='upgrade repository format')
        subparser.set_defaults(func=self.do_upgrade)
        subparser.add_argument('-p', '--progress', dest='progress',
                               action='store_true', default=False,
                               help="""show progress display while upgrading the repository""")
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               default=False, action='store_true',
                               help='do not change repository')
        subparser.add_argument('-i', '--inplace', dest='inplace',
                               default=False, action='store_true',
                               help="""rewrite repository in place, with no chance of going back to older
                               versions of the repository.""")
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='path to the repository to be upgraded')

        subparser = subparsers.add_parser('help', parents=[common_parser],
                                          description='Extra help')
        subparser.add_argument('--epilog-only', dest='epilog_only',
                               action='store_true', default=False)
        subparser.add_argument('--usage-only', dest='usage_only',
                               action='store_true', default=False)
        subparser.set_defaults(func=functools.partial(self.do_help, parser, subparsers.choices))
        subparser.add_argument('topic', metavar='TOPIC', type=str, nargs='?',
                               help='additional help on TOPIC')

        debug_dump_archive_items_epilog = textwrap.dedent("""
        This command dumps raw (but decrypted and decompressed) archive items (only metadata) to files.
        """)
        subparser = subparsers.add_parser('debug-dump-archive-items', parents=[common_parser],
                                          description=self.do_debug_dump_archive_items.__doc__,
                                          epilog=debug_dump_archive_items_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='dump archive items (metadata) (debug)')
        subparser.set_defaults(func=self.do_debug_dump_archive_items)
        subparser.add_argument('location', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to dump')

        debug_dump_repo_objs_epilog = textwrap.dedent("""
        This command dumps raw (but decrypted and decompressed) repo objects to files.
        """)
        subparser = subparsers.add_parser('debug-dump-repo-objs', parents=[common_parser],
                                          description=self.do_debug_dump_repo_objs.__doc__,
                                          epilog=debug_dump_repo_objs_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='dump repo objects (debug)')
        subparser.set_defaults(func=self.do_debug_dump_repo_objs)
        subparser.add_argument('location', metavar='REPOSITORY',
                               type=location_validator(archive=False),
                               help='repo to dump')

        debug_get_obj_epilog = textwrap.dedent("""
        This command gets an object from the repository.
        """)
        subparser = subparsers.add_parser('debug-get-obj', parents=[common_parser],
                                          description=self.do_debug_get_obj.__doc__,
                                          epilog=debug_get_obj_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='get object from repository (debug)')
        subparser.set_defaults(func=self.do_debug_get_obj)
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository to use')
        subparser.add_argument('id', metavar='ID', type=str,
                               help='hex object ID to get from the repo')
        subparser.add_argument('path', metavar='PATH', type=str,
                               help='file to write object data into')

        debug_put_obj_epilog = textwrap.dedent("""
        This command puts objects into the repository.
        """)
        subparser = subparsers.add_parser('debug-put-obj', parents=[common_parser],
                                          description=self.do_debug_put_obj.__doc__,
                                          epilog=debug_put_obj_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='put object to repository (debug)')
        subparser.set_defaults(func=self.do_debug_put_obj)
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository to use')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='file(s) to read and create object(s) from')

        debug_delete_obj_epilog = textwrap.dedent("""
        This command deletes objects from the repository.
        """)
        subparser = subparsers.add_parser('debug-delete-obj', parents=[common_parser],
                                          description=self.do_debug_delete_obj.__doc__,
                                          epilog=debug_delete_obj_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter,
                                          help='delete object from repository (debug)')
        subparser.set_defaults(func=self.do_debug_delete_obj)
        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                               type=location_validator(archive=False),
                               help='repository to use')
        subparser.add_argument('ids', metavar='IDs', nargs='+', type=str,
                               help='hex object ID(s) to delete from the repo')
        return parser

    def get_args(self, argv, cmd):
        """usually, just returns argv, except if we deal with a ssh forced command for borg serve."""
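        # cmd is the SSH_ORIGINAL_COMMAND forwarded by main() when borg is invoked via
        # an ssh forced command; only the serve-specific restrictions from it
        # (restrict_to_paths, append_only) are honoured below.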
        result = self.parse_args(argv[1:])
        if cmd is not None and result.func == self.do_serve:
            forced_result = result
            argv = shlex.split(cmd)
            result = self.parse_args(argv[1:])
            if result.func != forced_result.func:
                # someone is trying to execute a different borg subcommand, don't do that!
                return forced_result
            # we only take specific options from the forced "borg serve" command:
            result.restrict_to_paths = forced_result.restrict_to_paths
            result.append_only = forced_result.append_only
        return result

    def parse_args(self, args=None):
        # We can't use argparse for "serve" since we don't want it to show up in "Available commands"
        if args:
            args = self.preprocess_args(args)
        parser = self.build_parser(args)
        args = parser.parse_args(args or ['-h'])
        update_excludes(args)
        return args

    def run(self, args):
        os.umask(args.umask)  # early, before opening files
        self.lock_wait = args.lock_wait
        setup_logging(level=args.log_level, is_serve=args.func == self.do_serve)  # do not use loggers before this!
        check_extension_modules()
        if is_slow_msgpack():
            logger.warning("Using a pure-python msgpack! This will result in lower performance.")
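        # args.func was bound by the per-command set_defaults(func=...) calls in
        # build_parser(), so this dispatches to the selected subcommand handler.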
        return args.func(args)


def sig_info_handler(signum, stack):  # pragma: no cover
    """search the stack for infos about the currently processed file and print them"""
    for frame in inspect.getouterframes(stack):
        func, loc = frame[3], frame[0].f_locals
        if func in ('process_file', '_process', ):  # create op
            path = loc['path']
            try:
                pos = loc['fd'].tell()
                total = loc['st'].st_size
            except Exception:
                pos, total = 0, 0
            logger.info("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
            break
        if func in ('extract_item', ):  # extract op
            path = loc['item'][b'path']
            try:
                pos = loc['fd'].tell()
            except Exception:
                pos = 0
            logger.info("{0} {1}/???".format(path, format_file_size(pos)))
            break


class SIGTERMReceived(BaseException):
    pass


def sig_term_handler(signum, stack):
    raise SIGTERMReceived
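

# Wire up the handlers above: SIGUSR1 (and SIGINFO / ctrl-t where available) print
# progress information about the file currently being processed, and SIGTERM is
# turned into the SIGTERMReceived exception that main() reports as a clean error.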
def setup_signal_handlers():  # pragma: no cover
    sigs = []
    if hasattr(signal, 'SIGUSR1'):
        sigs.append(signal.SIGUSR1)  # kill -USR1 pid
    if hasattr(signal, 'SIGINFO'):
        sigs.append(signal.SIGINFO)  # kill -INFO pid (or ctrl-t)
    for sig in sigs:
        signal.signal(sig, sig_info_handler)
    signal.signal(signal.SIGTERM, sig_term_handler)


def main():  # pragma: no cover
    # Make sure stdout and stderr have errors='replace' to avoid unicode
    # issues when print()-ing unicode file names
    sys.stdout = ErrorIgnoringTextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
    sys.stderr = ErrorIgnoringTextIOWrapper(sys.stderr.buffer, sys.stderr.encoding, 'replace', line_buffering=True)
    setup_signal_handlers()
    archiver = Archiver()
    msg = None
    try:
        args = archiver.get_args(sys.argv, os.environ.get('SSH_ORIGINAL_COMMAND'))
    except Error as e:
        msg = e.get_message()
        if e.traceback:
            msg += "\n%s\n%s" % (traceback.format_exc(), sysinfo())
        # we might not have logging setup yet, so get out quickly
        print(msg, file=sys.stderr)
        sys.exit(e.exit_code)
    try:
        exit_code = archiver.run(args)
    except Error as e:
        msg = e.get_message()
        if e.traceback:
            msg += "\n%s\n%s" % (traceback.format_exc(), sysinfo())
        exit_code = e.exit_code
    except RemoteRepository.RPCError as e:
        msg = '%s\n%s' % (str(e), sysinfo())
        exit_code = EXIT_ERROR
    except Exception:
        msg = 'Local Exception.\n%s\n%s' % (traceback.format_exc(), sysinfo())
        exit_code = EXIT_ERROR
    except KeyboardInterrupt:
        msg = 'Keyboard interrupt.\n%s\n%s' % (traceback.format_exc(), sysinfo())
        exit_code = EXIT_ERROR
    except SIGTERMReceived:
        msg = 'Received SIGTERM.'
        exit_code = EXIT_ERROR
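    # EXIT_SUCCESS, EXIT_WARNING and EXIT_ERROR are borg's process return codes
    # (0, 1 and 2); the "abnormal" branch below logs 666 when exit_code is None.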
    if msg:
        logger.error(msg)
    if args.show_rc:
        exit_msg = 'terminating with %s status, rc %d'
        if exit_code == EXIT_SUCCESS:
            logger.info(exit_msg % ('success', exit_code))
        elif exit_code == EXIT_WARNING:
            logger.warning(exit_msg % ('warning', exit_code))
        elif exit_code == EXIT_ERROR:
            logger.error(exit_msg % ('error', exit_code))
        else:
            # if you see 666 in output, it usually means exit_code was None
            logger.error(exit_msg % ('abnormal', exit_code or 666))
    sys.exit(exit_code)


if __name__ == '__main__':
    main()