archiver.py 102 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962
  1. from binascii import hexlify, unhexlify
  2. from datetime import datetime
  3. from itertools import zip_longest
  4. from operator import attrgetter
  5. import argparse
  6. import collections
  7. import functools
  8. import hashlib
  9. import inspect
  10. import io
  11. import os
  12. import shlex
  13. import signal
  14. import stat
  15. import sys
  16. import textwrap
  17. import traceback
  18. from . import __version__
  19. from .helpers import Error, location_validator, archivename_validator, format_time, format_file_size, \
  20. parse_pattern, PathPrefixPattern, to_localtime, timestamp, \
  21. get_cache_dir, prune_within, prune_split, \
  22. Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
  23. dir_is_tagged, ChunkerParams, CompressionSpec, is_slow_msgpack, yes, sysinfo, \
  24. log_multi, PatternMatcher, ItemFormatter
  25. from .logger import create_logger, setup_logging
  26. logger = create_logger()
  27. from .compress import Compressor, COMPR_BUFFER
  28. from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
  29. from .repository import Repository
  30. from .cache import Cache
  31. from .constants import * # NOQA
  32. from .key import key_creator, RepoKey, PassphraseKey
  33. from .archive import Archive, ArchiveChecker, ArchiveRecreater
  34. from .remote import RepositoryServer, RemoteRepository, cache_if_remote
  35. from .hashindex import ChunkIndexEntry
  36. has_lchflags = hasattr(os, 'lchflags')
  37. def argument(args, str_or_bool):
  38. """If bool is passed, return it. If str is passed, retrieve named attribute from args."""
  39. if isinstance(str_or_bool, str):
  40. return getattr(args, str_or_bool)
  41. return str_or_bool
def with_repository(fake=False, create=False, lock=True, exclusive=False, manifest=True, cache=False):
    """
    Method decorator for subcommand-handling methods: do_XYZ(self, args, repository, …)

    If a parameter (where allowed) is a str the attribute named of args is used instead.
    :param fake: (str or bool) use None instead of repository, don't do anything else
    :param create: create repository
    :param lock: lock repository
    :param exclusive: (str or bool) lock repository exclusively (for writing)
    :param manifest: load manifest and key, pass them as keyword arguments
    :param cache: open cache, pass it as keyword argument (implies manifest)
    """
    def decorator(method):
        @functools.wraps(method)
        def wrapper(self, args, **kwargs):
            location = args.location  # note: 'location' must be always present in args
            # 'fake' may be a str naming an args attribute (e.g. 'dry_run'); resolve it
            if argument(args, fake):
                return method(self, args, repository=None, **kwargs)
            elif location.proto == 'ssh':
                # remote repository; exclusiveness is handled server-side via args
                repository = RemoteRepository(location, create=create, lock_wait=self.lock_wait, lock=lock, args=args)
            else:
                repository = Repository(location.path, create=create, exclusive=argument(args, exclusive),
                                        lock_wait=self.lock_wait, lock=lock)
            with repository:
                # cache implies manifest: the Cache constructor needs key + manifest
                if manifest or cache:
                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository)
                if cache:
                    with Cache(repository, kwargs['key'], kwargs['manifest'],
                               do_files=getattr(args, 'cache_files', False), lock_wait=self.lock_wait) as cache_:
                        return method(self, args, repository=repository, cache=cache_, **kwargs)
                else:
                    return method(self, args, repository=repository, **kwargs)
        return wrapper
    return decorator
  75. def with_archive(method):
  76. @functools.wraps(method)
  77. def wrapper(self, args, repository, key, manifest, **kwargs):
  78. archive = Archive(repository, key, manifest, args.location.archive,
  79. numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'))
  80. return method(self, args, repository=repository, manifest=manifest, key=key, archive=archive, **kwargs)
  81. return wrapper
  82. class Archiver:
  83. def __init__(self, lock_wait=None):
  84. self.exit_code = EXIT_SUCCESS
  85. self.lock_wait = lock_wait
  86. def print_error(self, msg, *args):
  87. msg = args and msg % args or msg
  88. self.exit_code = EXIT_ERROR
  89. logger.error(msg)
  90. def print_warning(self, msg, *args):
  91. msg = args and msg % args or msg
  92. self.exit_code = EXIT_WARNING # we do not terminate here, so it is a warning
  93. logger.warning(msg)
  94. def print_file_status(self, status, path):
  95. if self.output_list and (self.output_filter is None or status in self.output_filter):
  96. logger.info("%1s %s", status, remove_surrogates(path))
  97. @staticmethod
  98. def compare_chunk_contents(chunks1, chunks2):
  99. """Compare two chunk iterators (like returned by :meth:`.DownloadPipeline.fetch_many`)"""
  100. end = object()
  101. alen = ai = 0
  102. blen = bi = 0
  103. while True:
  104. if not alen - ai:
  105. a = next(chunks1, end)
  106. if a is end:
  107. return not blen - bi and next(chunks2, end) is end
  108. a = memoryview(a)
  109. alen = len(a)
  110. ai = 0
  111. if not blen - bi:
  112. b = next(chunks2, end)
  113. if b is end:
  114. return not alen - ai and next(chunks1, end) is end
  115. b = memoryview(b)
  116. blen = len(b)
  117. bi = 0
  118. slicelen = min(alen - ai, blen - bi)
  119. if a[ai:ai + slicelen] != b[bi:bi + slicelen]:
  120. return False
  121. ai += slicelen
  122. bi += slicelen
  123. @staticmethod
  124. def build_matcher(excludes, paths):
  125. matcher = PatternMatcher()
  126. if excludes:
  127. matcher.add(excludes, False)
  128. include_patterns = []
  129. if paths:
  130. include_patterns.extend(parse_pattern(i, PathPrefixPattern) for i in paths)
  131. matcher.add(include_patterns, True)
  132. matcher.fallback = not include_patterns
  133. return matcher, include_patterns
  134. def do_serve(self, args):
  135. """Start in server mode. This command is usually not used manually.
  136. """
  137. return RepositoryServer(restrict_to_paths=args.restrict_to_paths).serve()
    @with_repository(create=True, exclusive=True, manifest=False)
    def do_init(self, args, repository):
        """Initialize an empty repository"""
        logger.info('Initializing repository at "%s"' % args.location.canonical_path())
        try:
            key = key_creator(repository, args)
        except (EOFError, KeyboardInterrupt):
            # user aborted key/passphrase entry - remove the half-created repo
            repository.destroy()
            return EXIT_WARNING
        manifest = Manifest(key, repository)
        manifest.key = key
        manifest.write()
        repository.commit()
        # instantiate the cache once so it is created/initialized for this repo;
        # warn_if_unencrypted=False: no "unencrypted repo" nag during init
        with Cache(repository, key, manifest, warn_if_unencrypted=False):
            pass
        return self.exit_code
  154. @with_repository(exclusive='repair', manifest=False)
  155. def do_check(self, args, repository):
  156. """Check repository consistency"""
  157. if args.repair:
  158. msg = ("'check --repair' is an experimental feature that might result in data loss." +
  159. "\n" +
  160. "Type 'YES' if you understand this and want to continue: ")
  161. if not yes(msg, false_msg="Aborting.", truish=('YES', ),
  162. env_var_override='BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
  163. return EXIT_ERROR
  164. if not args.archives_only:
  165. if not repository.check(repair=args.repair, save_space=args.save_space):
  166. return EXIT_WARNING
  167. if not args.repo_only and not ArchiveChecker().check(
  168. repository, repair=args.repair, archive=args.location.archive,
  169. last=args.last, prefix=args.prefix, save_space=args.save_space):
  170. return EXIT_WARNING
  171. return EXIT_SUCCESS
    @with_repository()
    def do_change_passphrase(self, args, repository, manifest, key):
        """Change repository key file passphrase"""
        # the key was already loaded (via Manifest.load in the decorator);
        # this only re-protects it with a newly entered passphrase
        key.change_passphrase()
        return EXIT_SUCCESS
    @with_repository(manifest=False)
    def do_migrate_to_repokey(self, args, repository):
        """Migrate passphrase -> repokey"""
        manifest_data = repository.get(Manifest.MANIFEST_ID)
        # detect the old passphrase-mode key; manifest_data is passed to detect()
        # (presumably used to verify the entered passphrase - confirm in key.py)
        key_old = PassphraseKey.detect(repository, manifest_data)
        # build a new repokey-mode key and copy all secrets over from the old key
        key_new = RepoKey(repository)
        key_new.target = repository
        key_new.repository_id = repository.id
        key_new.enc_key = key_old.enc_key
        key_new.enc_hmac_key = key_old.enc_hmac_key
        key_new.id_key = key_old.id_key
        key_new.chunk_seed = key_old.chunk_seed
        key_new.change_passphrase()  # option to change key protection passphrase, save
        return EXIT_SUCCESS
    @with_repository(fake='dry_run')
    def do_create(self, args, repository, manifest=None, key=None):
        """Create new archive

        With ``--dry-run`` the decorator passes repository=None and no archive or
        cache is created; the file walk still runs to produce status output.
        """
        matcher = PatternMatcher(fallback=True)
        if args.excludes:
            matcher.add(args.excludes, False)

        def create_inner(archive, cache):
            # closure over args/matcher/dry_run; archive and cache are None on dry runs
            # Add cache dir to inode_skip list
            skip_inodes = set()
            try:
                st = os.stat(get_cache_dir())
                skip_inodes.add((st.st_ino, st.st_dev))
            except OSError:
                pass
            # Add local repository dir to inode_skip list
            if not args.location.host:
                try:
                    st = os.stat(args.location.path)
                    skip_inodes.add((st.st_ino, st.st_dev))
                except OSError:
                    pass
            for path in args.paths:
                if path == '-':  # stdin
                    path = 'stdin'
                    if not dry_run:
                        try:
                            status = archive.process_stdin(path, cache)
                        except OSError as e:
                            status = 'E'
                            self.print_warning('%s: %s', path, e)
                    else:
                        status = '-'
                    self.print_file_status(status, path)
                    continue
                path = os.path.normpath(path)
                if args.one_file_system:
                    # remember the device of the start path so _process can stop
                    # at filesystem boundaries
                    try:
                        restrict_dev = os.lstat(path).st_dev
                    except OSError as e:
                        self.print_warning('%s: %s', path, e)
                        continue
                else:
                    restrict_dev = None
                self._process(archive, cache, matcher, args.exclude_caches, args.exclude_if_present,
                              args.keep_tag_files, skip_inodes, path, restrict_dev,
                              read_special=args.read_special, dry_run=dry_run)
            if not dry_run:
                archive.save(comment=args.comment, timestamp=args.timestamp)
                if args.progress:
                    archive.stats.show_progress(final=True)
                if args.stats:
                    archive.end = datetime.utcnow()
                    log_multi(DASHES,
                              str(archive),
                              DASHES,
                              str(archive.stats),
                              str(cache),
                              DASHES)

        # per-invocation output settings used by print_file_status/_process
        self.output_filter = args.output_filter
        self.output_list = args.output_list
        self.ignore_inode = args.ignore_inode
        dry_run = args.dry_run
        t0 = datetime.utcnow()
        if not dry_run:
            compr_args = dict(buffer=COMPR_BUFFER)
            compr_args.update(args.compression)
            key.compressor = Compressor(**compr_args)
            with Cache(repository, key, manifest, do_files=args.cache_files, lock_wait=self.lock_wait) as cache:
                archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
                                  create=True, checkpoint_interval=args.checkpoint_interval,
                                  numeric_owner=args.numeric_owner, progress=args.progress,
                                  chunker_params=args.chunker_params, start=t0)
                create_inner(archive, cache)
        else:
            create_inner(None, None)
        return self.exit_code
    def _process(self, archive, cache, matcher, exclude_caches, exclude_if_present,
                 keep_tag_files, skip_inodes, path, restrict_dev,
                 read_special=False, dry_run=False):
        """Recursively process *path*: archive files/dirs/special files, descending
        into directories, honoring excludes, tag files and filesystem boundaries.

        :param skip_inodes: (ino, dev) pairs never to archive (cache dir, the repo itself)
        :param restrict_dev: if not None, stay on this device (--one-file-system)
        :param read_special: archive content of special files instead of the node itself
        :param dry_run: walk and report status only, do not write to the archive
        """
        if not matcher.match(path):
            self.print_file_status('x', path)
            return
        try:
            st = os.lstat(path)
        except OSError as e:
            self.print_warning('%s: %s', path, e)
            return
        if (st.st_ino, st.st_dev) in skip_inodes:
            return
        # Entering a new filesystem?
        if restrict_dev is not None and st.st_dev != restrict_dev:
            return
        status = None
        # Ignore if nodump flag is set
        if has_lchflags and (st.st_flags & stat.UF_NODUMP):
            return
        # regular files; with read_special also any non-directory special file
        if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
            if not dry_run:
                try:
                    status = archive.process_file(path, st, cache, self.ignore_inode)
                except OSError as e:
                    status = 'E'
                    self.print_warning('%s: %s', path, e)
        elif stat.S_ISDIR(st.st_mode):
            tag_paths = dir_is_tagged(path, exclude_caches, exclude_if_present)
            if tag_paths:
                # tagged dir: skip contents, optionally keeping the dir + tag files
                if keep_tag_files and not dry_run:
                    archive.process_dir(path, st)
                    for tag_path in tag_paths:
                        self._process(archive, cache, matcher, exclude_caches, exclude_if_present,
                                      keep_tag_files, skip_inodes, tag_path, restrict_dev,
                                      read_special=read_special, dry_run=dry_run)
                return
            if not dry_run:
                status = archive.process_dir(path, st)
            try:
                entries = os.listdir(path)
            except OSError as e:
                status = 'E'
                self.print_warning('%s: %s', path, e)
            else:
                # sorted for deterministic archive order
                for filename in sorted(entries):
                    entry_path = os.path.normpath(os.path.join(path, filename))
                    self._process(archive, cache, matcher, exclude_caches, exclude_if_present,
                                  keep_tag_files, skip_inodes, entry_path, restrict_dev,
                                  read_special=read_special, dry_run=dry_run)
        elif stat.S_ISLNK(st.st_mode):
            if not dry_run:
                status = archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            if not dry_run:
                status = archive.process_fifo(path, st)
        elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
            if not dry_run:
                status = archive.process_dev(path, st)
        elif stat.S_ISSOCK(st.st_mode):
            # Ignore unix sockets
            return
        elif stat.S_ISDOOR(st.st_mode):
            # Ignore Solaris doors
            return
        elif stat.S_ISPORT(st.st_mode):
            # Ignore Solaris event ports
            return
        else:
            self.print_warning('Unknown file type: %s', path)
            return
        # Status output
        if status is None:
            if not dry_run:
                status = '?'  # need to add a status code somewhere
            else:
                status = '-'  # dry run, item was not backed up
        self.print_file_status(status, path)
    @with_repository()
    @with_archive
    def do_extract(self, args, repository, manifest, key, archive):
        """Extract archive contents"""
        # be restrictive when restoring files, restore permissions later
        if sys.getfilesystemencoding() == 'ascii':
            logger.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
            if sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd', 'darwin', )):
                logger.warning('Hint: You likely need to fix your locale setup. E.g. install locales and use: LANG=en_US.UTF-8')
        matcher, include_patterns = self.build_matcher(args.excludes, args.paths)
        output_list = args.output_list
        dry_run = args.dry_run
        stdout = args.stdout
        sparse = args.sparse
        strip_components = args.strip_components
        dirs = []  # stack of extracted directory items; attrs restored after contents
        # for partial extracts we must track hardlink masters ourselves, since the
        # master of a link may itself be excluded from the extraction
        partial_extract = not matcher.empty() or strip_components
        hardlink_masters = {} if partial_extract else None

        def item_is_hardlink_master(item):
            return (partial_extract and stat.S_ISREG(item[b'mode']) and
                    item.get(b'hardlink_master', True) and b'source' not in item)

        for item in archive.iter_items(preload=True,
                filter=lambda item: item_is_hardlink_master(item) or matcher.match(item[b'path'])):
            orig_path = item[b'path']
            if item_is_hardlink_master(item):
                hardlink_masters[orig_path] = (item.get(b'chunks'), None)
            # items passing the filter only as hardlink masters are recorded above
            # but not extracted themselves
            if not matcher.match(item[b'path']):
                continue
            if strip_components:
                item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
                if not item[b'path']:
                    continue
            if not args.dry_run:
                # leaving a directory: restore its attributes now that it is complete
                while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                    archive.extract_item(dirs.pop(-1), stdout=stdout)
            if output_list:
                logger.info(remove_surrogates(orig_path))
            try:
                if dry_run:
                    archive.extract_item(item, dry_run=True)
                else:
                    if stat.S_ISDIR(item[b'mode']):
                        dirs.append(item)
                        archive.extract_item(item, restore_attrs=False)
                    else:
                        archive.extract_item(item, stdout=stdout, sparse=sparse, hardlink_masters=hardlink_masters,
                                             original_path=orig_path)
            except OSError as e:
                self.print_warning('%s: %s', remove_surrogates(orig_path), e)
        if not args.dry_run:
            # flush remaining directories from the stack, restoring their attributes
            while dirs:
                archive.extract_item(dirs.pop(-1))
        for pattern in include_patterns:
            if pattern.match_count == 0:
                self.print_warning("Include pattern '%s' never matched.", pattern)
        return self.exit_code
    @with_repository()
    @with_archive
    def do_diff(self, args, repository, manifest, key, archive):
        """Diff contents of two archives

        Compares args.location.archive (archive1) against args.archive2.  When
        both archives were created with the same chunker params, chunk id lists
        are compared directly; otherwise contents are fetched and compared
        byte-wise (slow).
        """
        def fetch_and_compare_chunks(chunk_ids1, chunk_ids2, archive1, archive2):
            # byte-wise comparison of actual chunk data (slow path)
            chunks1 = archive1.pipeline.fetch_many(chunk_ids1)
            chunks2 = archive2.pipeline.fetch_many(chunk_ids2)
            return self.compare_chunk_contents(chunks1, chunks2)

        def sum_chunk_size(item, consider_ids=None):
            # total content size of *item*, optionally restricted to given chunk ids;
            # None for deleted placeholder items
            if item.get(b'deleted'):
                return None
            else:
                return sum(c.size for c in item[b'chunks']
                           if consider_ids is None or c.id in consider_ids)

        def get_owner(item):
            if args.numeric_owner:
                return item[b'uid'], item[b'gid']
            else:
                return item[b'user'], item[b'group']

        def get_mode(item):
            # returns a filemode string like 'drwxr-xr-x', or [None] so that
            # get_mode(item)[0] is safe for mode-less (deleted) placeholder items
            if b'mode' in item:
                return stat.filemode(item[b'mode'])
            else:
                return [None]

        def has_hardlink_master(item, hardlink_masters):
            return item.get(b'source') in hardlink_masters and get_mode(item)[0] != 'l'

        def compare_link(item1, item2):
            # These are the simple link cases. For special cases, e.g. if a
            # regular file is replaced with a link or vice versa, it is
            # indicated in compare_mode instead.
            if item1.get(b'deleted'):
                return 'added link'
            elif item2.get(b'deleted'):
                return 'removed link'
            elif b'source' in item1 and b'source' in item2 and item1[b'source'] != item2[b'source']:
                return 'changed link'

        def contents_changed(item1, item2):
            if can_compare_chunk_ids:
                return item1[b'chunks'] != item2[b'chunks']
            else:
                # different chunker params: equal sizes still require a full fetch
                if sum_chunk_size(item1) != sum_chunk_size(item2):
                    return True
                else:
                    chunk_ids1 = [c.id for c in item1[b'chunks']]
                    chunk_ids2 = [c.id for c in item2[b'chunks']]
                    return not fetch_and_compare_chunks(chunk_ids1, chunk_ids2, archive1, archive2)

        def compare_content(path, item1, item2):
            if contents_changed(item1, item2):
                if item1.get(b'deleted'):
                    return ('added {:>13}'.format(format_file_size(sum_chunk_size(item2))))
                elif item2.get(b'deleted'):
                    return ('removed {:>11}'.format(format_file_size(sum_chunk_size(item1))))
                else:
                    # report bytes added/removed based on the chunk id set difference
                    chunk_ids1 = {c.id for c in item1[b'chunks']}
                    chunk_ids2 = {c.id for c in item2[b'chunks']}
                    added_ids = chunk_ids2 - chunk_ids1
                    removed_ids = chunk_ids1 - chunk_ids2
                    added = sum_chunk_size(item2, added_ids)
                    removed = sum_chunk_size(item1, removed_ids)
                    return ('{:>9} {:>9}'.format(format_file_size(added, precision=1, sign=True),
                                                 format_file_size(-removed, precision=1, sign=True)))

        def compare_directory(item1, item2):
            if item2.get(b'deleted') and not item1.get(b'deleted'):
                return 'removed directory'
            elif item1.get(b'deleted') and not item2.get(b'deleted'):
                return 'added directory'

        def compare_owner(item1, item2):
            user1, group1 = get_owner(item1)
            user2, group2 = get_owner(item2)
            if user1 != user2 or group1 != group2:
                return '[{}:{} -> {}:{}]'.format(user1, group1, user2, group2)

        def compare_mode(item1, item2):
            if item1[b'mode'] != item2[b'mode']:
                return '[{} -> {}]'.format(get_mode(item1), get_mode(item2))

        def compare_items(output, path, item1, item2, hardlink_masters, deleted=False):
            """
            Compare two items with identical paths.
            :param deleted: Whether one of the items has been deleted
            """
            changes = []
            if item1.get(b'hardlink_master') or item2.get(b'hardlink_master'):
                hardlink_masters[path] = (item1, item2)
            # hardlink slaves are compared via their master's content
            if has_hardlink_master(item1, hardlink_masters):
                item1 = hardlink_masters[item1[b'source']][0]
            if has_hardlink_master(item2, hardlink_masters):
                item2 = hardlink_masters[item2[b'source']][1]
            if get_mode(item1)[0] == 'l' or get_mode(item2)[0] == 'l':
                changes.append(compare_link(item1, item2))
            if b'chunks' in item1 and b'chunks' in item2:
                changes.append(compare_content(path, item1, item2))
            if get_mode(item1)[0] == 'd' or get_mode(item2)[0] == 'd':
                changes.append(compare_directory(item1, item2))
            if not deleted:
                changes.append(compare_owner(item1, item2))
                changes.append(compare_mode(item1, item2))
            # drop None entries from comparators that found no difference
            changes = [x for x in changes if x]
            if changes:
                output_line = (remove_surrogates(path), ' '.join(changes))
                if args.sort:
                    output.append(output_line)
                else:
                    print_output(output_line)

        def print_output(line):
            print("{:<19} {}".format(line[1], line[0]))

        def compare_archives(archive1, archive2, matcher):
            # both item streams are sorted by path, so walk them in lockstep and
            # hold unmatched items as "orphans" until their partner shows up
            orphans_archive1 = collections.OrderedDict()
            orphans_archive2 = collections.OrderedDict()
            hardlink_masters = {}
            output = []
            for item1, item2 in zip_longest(
                    archive1.iter_items(lambda item: matcher.match(item[b'path'])),
                    archive2.iter_items(lambda item: matcher.match(item[b'path'])),
            ):
                if item1 and item2 and item1[b'path'] == item2[b'path']:
                    compare_items(output, item1[b'path'], item1, item2, hardlink_masters)
                    continue
                if item1:
                    matching_orphan = orphans_archive2.pop(item1[b'path'], None)
                    if matching_orphan:
                        compare_items(output, item1[b'path'], item1, matching_orphan, hardlink_masters)
                    else:
                        orphans_archive1[item1[b'path']] = item1
                if item2:
                    matching_orphan = orphans_archive1.pop(item2[b'path'], None)
                    if matching_orphan:
                        compare_items(output, item2[b'path'], matching_orphan, item2, hardlink_masters)
                    else:
                        orphans_archive2[item2[b'path']] = item2
            # At this point orphans_* contain items that had no matching partner in the other archive
            for added in orphans_archive2.values():
                compare_items(output, added[b'path'], {
                    b'deleted': True,
                    b'chunks': [],
                }, added, hardlink_masters, deleted=True)
            for deleted in orphans_archive1.values():
                compare_items(output, deleted[b'path'], deleted, {
                    b'deleted': True,
                    b'chunks': [],
                }, hardlink_masters, deleted=True)
            for line in sorted(output):
                print_output(line)

        archive1 = archive
        archive2 = Archive(repository, key, manifest, args.archive2)
        # differing defaults make a missing metadata key on either side count as "different"
        can_compare_chunk_ids = archive1.metadata.get(b'chunker_params', False) == archive2.metadata.get(
            b'chunker_params', True) or args.same_chunker_params
        if not can_compare_chunk_ids:
            self.print_warning('--chunker-params might be different between archives, diff will be slow.\n'
                               'If you know for certain that they are the same, pass --same-chunker-params '
                               'to override this check.')
        matcher, include_patterns = self.build_matcher(args.excludes, args.paths)
        compare_archives(archive1, archive2, matcher)
        for pattern in include_patterns:
            if pattern.match_count == 0:
                self.print_warning("Include pattern '%s' never matched.", pattern)
        return self.exit_code
    @with_repository(exclusive=True, cache=True)
    @with_archive
    def do_rename(self, args, repository, manifest, key, cache, archive):
        """Rename an existing archive"""
        archive.rename(args.name)
        # persist in order: manifest, then repository, then the local cache
        manifest.write()
        repository.commit()
        cache.commit()
        return self.exit_code
    @with_repository(exclusive=True)
    def do_delete(self, args, repository, manifest, key):
        """Delete an existing repository or archive

        With an archive name in the location: delete just that archive.
        Without one: delete the whole repository (after confirmation) and its cache;
        with --cache-only, delete only the local cache.
        """
        if args.location.archive:
            with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
                archive = Archive(repository, key, manifest, args.location.archive, cache=cache)
                stats = Statistics()
                archive.delete(stats, progress=args.progress)
                manifest.write()
                repository.commit(save_space=args.save_space)
                cache.commit()
                logger.info("Archive deleted.")
                if args.stats:
                    log_multi(DASHES,
                              stats.summary.format(label='Deleted data:', stats=stats),
                              str(cache),
                              DASHES)
        else:
            if not args.cache_only:
                # repository deletion is irreversible: list what would be lost
                # and require an explicit 'YES'
                msg = []
                msg.append("You requested to completely DELETE the repository *including* all archives it contains:")
                for archive_info in manifest.list_archive_infos(sort_by='ts'):
                    msg.append(format_archive(archive_info))
                msg.append("Type 'YES' if you understand this and want to continue: ")
                msg = '\n'.join(msg)
                if not yes(msg, false_msg="Aborting.", truish=('YES', ),
                           env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
                    self.exit_code = EXIT_ERROR
                    return self.exit_code
                repository.destroy()
                logger.info("Repository deleted.")
            Cache.destroy(repository)
            logger.info("Cache deleted.")
        return self.exit_code
@with_repository()
def do_mount(self, args, repository, manifest, key):
    """Mount archive or an entire repository as a FUSE filesystem"""
    try:
        # FUSE support is optional; import lazily so the rest of borg
        # works without it.
        from .fuse import FuseOperations
    except ImportError as e:
        self.print_error('Loading fuse support failed [ImportError: %s]' % str(e))
        return self.exit_code
    # The mountpoint must exist and be fully accessible before mounting.
    if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
        self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
        return self.exit_code
    with cache_if_remote(repository) as cached_repo:
        if args.location.archive:
            archive = Archive(repository, key, manifest, args.location.archive)
        else:
            # archive=None means: expose the whole repository.
            archive = None
        operations = FuseOperations(key, repository, manifest, archive, cached_repo)
        logger.info("Mounting filesystem")
        try:
            operations.mount(args.mountpoint, args.options, args.foreground)
        except RuntimeError:
            # Relevant error message already printed to stderr by fuse
            self.exit_code = EXIT_ERROR
    return self.exit_code
  623. @with_repository()
  624. def do_list(self, args, repository, manifest, key):
  625. """List archive or repository contents"""
  626. if args.location.archive:
  627. matcher, _ = self.build_matcher(args.excludes, args.paths)
  628. with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
  629. archive = Archive(repository, key, manifest, args.location.archive, cache=cache)
  630. if args.format:
  631. format = args.format
  632. elif args.short:
  633. format = "{path}{NL}"
  634. else:
  635. format = "{mode} {user:6} {group:6} {size:8} {isomtime} {path}{extra}{NL}"
  636. formatter = ItemFormatter(archive, format)
  637. if not hasattr(sys.stdout, 'buffer'):
  638. # This is a shim for supporting unit tests replacing sys.stdout with e.g. StringIO,
  639. # which doesn't have an underlying buffer (= lower file object).
  640. def write(bytestring):
  641. sys.stdout.write(bytestring.decode('utf-8', errors='replace'))
  642. else:
  643. write = sys.stdout.buffer.write
  644. for item in archive.iter_items(lambda item: matcher.match(item[b'path'])):
  645. write(formatter.format_item(item).encode('utf-8', errors='surrogateescape'))
  646. else:
  647. for archive_info in manifest.list_archive_infos(sort_by='ts'):
  648. if args.prefix and not archive_info.name.startswith(args.prefix):
  649. continue
  650. if args.short:
  651. print(archive_info.name)
  652. else:
  653. print(format_archive(archive_info))
  654. return self.exit_code
  655. @with_repository(cache=True)
  656. @with_archive
  657. def do_info(self, args, repository, manifest, key, archive, cache):
  658. """Show archive details such as disk space used"""
  659. def format_cmdline(cmdline):
  660. return remove_surrogates(' '.join(shlex.quote(x) for x in cmdline))
  661. stats = archive.calc_stats(cache)
  662. print('Name:', archive.name)
  663. print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
  664. print('Comment:', archive.metadata.get(b'comment', ''))
  665. print('Hostname:', archive.metadata[b'hostname'])
  666. print('Username:', archive.metadata[b'username'])
  667. print('Time (start): %s' % format_time(to_localtime(archive.ts)))
  668. print('Time (end): %s' % format_time(to_localtime(archive.ts_end)))
  669. print('Command line:', format_cmdline(archive.metadata[b'cmdline']))
  670. print('Number of files: %d' % stats.nfiles)
  671. print()
  672. print(str(stats))
  673. print(str(cache))
  674. return self.exit_code
@with_repository()
def do_prune(self, args, repository, manifest, key):
    """Prune repository archives according to specified rules"""
    archives = manifest.list_archive_infos(sort_by='ts', reverse=True)  # just a ArchiveInfo list
    if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0 and args.within is None:
        self.print_error('At least one of the "keep-within", "keep-hourly", "keep-daily", "keep-weekly", '
                         '"keep-monthly" or "keep-yearly" settings must be specified')
        return self.exit_code
    if args.prefix:
        archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
    keep = []
    # Rules are applied in order; each prune_split call receives the
    # current `keep` list so archives already kept by an earlier rule
    # do not use up this rule's quota.
    if args.within:
        keep += prune_within(archives, args.within)
    if args.hourly:
        keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
    if args.daily:
        keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
    if args.weekly:
        keep += prune_split(archives, '%G-%V', args.weekly, keep)
    if args.monthly:
        keep += prune_split(archives, '%Y-%m', args.monthly, keep)
    if args.yearly:
        keep += prune_split(archives, '%Y', args.yearly, keep)
    keep.sort(key=attrgetter('ts'), reverse=True)
    to_delete = [a for a in archives if a not in keep]
    stats = Statistics()
    with Cache(repository, key, manifest, do_files=args.cache_files, lock_wait=self.lock_wait) as cache:
        for archive in keep:
            if args.output_list:
                logger.info('Keeping archive: %s' % format_archive(archive))
        for archive in to_delete:
            if args.dry_run:
                if args.output_list:
                    logger.info('Would prune: %s' % format_archive(archive))
            else:
                if args.output_list:
                    logger.info('Pruning archive: %s' % format_archive(archive))
                Archive(repository, key, manifest, archive.name, cache).delete(stats)
        # Only persist state if something was actually deleted.
        if to_delete and not args.dry_run:
            manifest.write()
            repository.commit(save_space=args.save_space)
            cache.commit()
        if args.stats:
            log_multi(DASHES,
                      stats.summary.format(label='Deleted data:', stats=stats),
                      str(cache),
                      DASHES)
    return self.exit_code
  723. def do_upgrade(self, args):
  724. """upgrade a repository from a previous version"""
  725. # mainly for upgrades from Attic repositories,
  726. # but also supports borg 0.xx -> 1.0 upgrade.
  727. repo = AtticRepositoryUpgrader(args.location.path, create=False)
  728. try:
  729. repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
  730. except NotImplementedError as e:
  731. print("warning: %s" % e)
  732. repo = BorgRepositoryUpgrader(args.location.path, create=False)
  733. try:
  734. repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
  735. except NotImplementedError as e:
  736. print("warning: %s" % e)
  737. return self.exit_code
@with_repository(cache=True, exclusive=True)
def do_recreate(self, args, repository, manifest, key, cache):
    """Re-create archives"""
    def interrupt(signal_num, stack_frame):
        # First signal requests a clean exit via the recreater's
        # interrupt flag; further signals are only acknowledged.
        if recreater.interrupt:
            print("\nReceived signal, again. I'm not deaf.", file=sys.stderr)
        else:
            print("\nReceived signal, will exit cleanly.", file=sys.stderr)
            recreater.interrupt = True
    # recreate is experimental: require explicit confirmation (or the
    # env var override) before touching any archives.
    msg = ("recreate is an experimental feature.\n"
           "Type 'YES' if you understand this and want to continue: ")
    if not yes(msg, false_msg="Aborting.", truish=('YES',),
               env_var_override='BORG_RECREATE_I_KNOW_WHAT_I_AM_DOING'):
        return EXIT_ERROR
    matcher, include_patterns = self.build_matcher(args.excludes, args.paths)
    self.output_list = args.output_list
    self.output_filter = args.output_filter
    recreater = ArchiveRecreater(repository, manifest, key, cache, matcher,
                                 exclude_caches=args.exclude_caches, exclude_if_present=args.exclude_if_present,
                                 keep_tag_files=args.keep_tag_files,
                                 compression=args.compression, chunker_params=args.chunker_params,
                                 progress=args.progress, stats=args.stats,
                                 file_status_printer=self.print_file_status,
                                 dry_run=args.dry_run)
    # Install the handlers only after the recreater exists (the closure
    # reads recreater.interrupt).
    signal.signal(signal.SIGTERM, interrupt)
    signal.signal(signal.SIGINT, interrupt)
    if args.location.archive:
        # Single archive mode.
        name = args.location.archive
        if recreater.is_temporary_archive(name):
            self.print_error('Refusing to work on temporary archive of prior recreate: %s', name)
            return self.exit_code
        recreater.recreate(name, args.comment)
    else:
        # Whole-repository mode: process every (non-temporary) archive;
        # recreate() returning falsy means "interrupted, stop".
        for archive in manifest.list_archive_infos(sort_by='ts'):
            name = archive.name
            if recreater.is_temporary_archive(name):
                continue
            print('Processing', name)
            if not recreater.recreate(name, args.comment):
                break
    manifest.write()
    repository.commit()
    cache.commit()
    return self.exit_code
  782. @with_repository()
  783. def do_debug_dump_archive_items(self, args, repository, manifest, key):
  784. """dump (decrypted, decompressed) archive items metadata (not: data)"""
  785. archive = Archive(repository, key, manifest, args.location.archive)
  786. for i, item_id in enumerate(archive.metadata[b'items']):
  787. data = key.decrypt(item_id, repository.get(item_id))
  788. filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
  789. print('Dumping', filename)
  790. with open(filename, 'wb') as fd:
  791. fd.write(data)
  792. print('Done.')
  793. return EXIT_SUCCESS
  794. @with_repository(manifest=False)
  795. def do_debug_get_obj(self, args, repository):
  796. """get object contents from the repository and write it into file"""
  797. hex_id = args.id
  798. try:
  799. id = unhexlify(hex_id)
  800. except ValueError:
  801. print("object id %s is invalid." % hex_id)
  802. else:
  803. try:
  804. data = repository.get(id)
  805. except repository.ObjectNotFound:
  806. print("object %s not found." % hex_id)
  807. else:
  808. with open(args.path, "wb") as f:
  809. f.write(data)
  810. print("object %s fetched." % hex_id)
  811. return EXIT_SUCCESS
  812. @with_repository(manifest=False)
  813. def do_debug_put_obj(self, args, repository):
  814. """put file(s) contents into the repository"""
  815. for path in args.paths:
  816. with open(path, "rb") as f:
  817. data = f.read()
  818. h = hashlib.sha256(data) # XXX hardcoded
  819. repository.put(h.digest(), data)
  820. print("object %s put." % h.hexdigest())
  821. repository.commit()
  822. return EXIT_SUCCESS
  823. @with_repository(manifest=False)
  824. def do_debug_delete_obj(self, args, repository):
  825. """delete the objects with the given IDs from the repo"""
  826. modified = False
  827. for hex_id in args.ids:
  828. try:
  829. id = unhexlify(hex_id)
  830. except ValueError:
  831. print("object id %s is invalid." % hex_id)
  832. else:
  833. try:
  834. repository.delete(id)
  835. modified = True
  836. print("object %s deleted." % hex_id)
  837. except repository.ObjectNotFound:
  838. print("object %s not found." % hex_id)
  839. if modified:
  840. repository.commit()
  841. print('Done.')
  842. return EXIT_SUCCESS
@with_repository(lock=False, manifest=False)
def do_break_lock(self, args, repository):
    """Break the repository lock (e.g. in case it was left by a dead borg)."""
    # Breaks both the repository lock and the cache lock; only safe when
    # no other borg process is actually using them.
    repository.break_lock()
    Cache.break_lock(repository)
    return self.exit_code
# Long-form help topics shown by "borg help TOPIC" (see do_help below).
helptext = {}
helptext['patterns'] = textwrap.dedent('''
Exclusion patterns support four separate styles, fnmatch, shell, regular
expressions and path prefixes. If followed by a colon (':') the first two
characters of a pattern are used as a style selector. Explicit style
selection is necessary when a non-default style is desired or when the
desired pattern starts with two alphanumeric characters followed by a colon
(i.e. `aa:something/*`).
`Fnmatch <https://docs.python.org/3/library/fnmatch.html>`_, selector `fm:`
These patterns use a variant of shell pattern syntax, with '*' matching
any number of characters, '?' matching any single character, '[...]'
matching any single character specified, including ranges, and '[!...]'
matching any character not specified. For the purpose of these patterns,
the path separator ('\\' for Windows and '/' on other systems) is not
treated specially. Wrap meta-characters in brackets for a literal match
(i.e. `[?]` to match the literal character `?`). For a path to match
a pattern, it must completely match from start to end, or must match from
the start to just before a path separator. Except for the root path,
paths will never end in the path separator when matching is attempted.
Thus, if a given pattern ends in a path separator, a '*' is appended
before matching is attempted.
Shell-style patterns, selector `sh:`
Like fnmatch patterns these are similar to shell patterns. The difference
is that the pattern may include `**/` for matching zero or more directory
levels, `*` for matching zero or more arbitrary characters with the
exception of any path separator.
Regular expressions, selector `re:`
Regular expressions similar to those found in Perl are supported. Unlike
shell patterns regular expressions are not required to match the complete
path and any substring match is sufficient. It is strongly recommended to
anchor patterns to the start ('^'), to the end ('$') or both. Path
separators ('\\' for Windows and '/' on other systems) in paths are
always normalized to a forward slash ('/') before applying a pattern. The
regular expression syntax is described in the `Python documentation for
the re module <https://docs.python.org/3/library/re.html>`_.
Prefix path, selector `pp:`
This pattern style is useful to match whole sub-directories. The pattern
`pp:/data/bar` matches `/data/bar` and everything therein.
Exclusions can be passed via the command line option `--exclude`. When used
from within a shell the patterns should be quoted to protect them from
expansion.
The `--exclude-from` option permits loading exclusion patterns from a text
file with one pattern per line. Lines empty or starting with the number sign
('#') after removing whitespace on both ends are ignored. The optional style
selector prefix is also supported for patterns loaded from a file. Due to
whitespace removal paths with whitespace at the beginning or end can only be
excluded using regular expressions.
Examples:
# Exclude '/home/user/file.o' but not '/home/user/file.odt':
$ borg create -e '*.o' backup /
# Exclude '/home/user/junk' and '/home/user/subdir/junk' but
# not '/home/user/importantjunk' or '/etc/junk':
$ borg create -e '/home/*/junk' backup /
# Exclude the contents of '/home/user/cache' but not the directory itself:
$ borg create -e /home/user/cache/ backup /
# The file '/home/user/cache/important' is *not* backed up:
$ borg create -e /home/user/cache/ backup / /home/user/cache/important
# The contents of directories in '/home' are not backed up when their name
# ends in '.tmp'
$ borg create --exclude 're:^/home/[^/]+\.tmp/' backup /
# Load exclusions from file
$ cat >exclude.txt <<EOF
# Comment line
/home/*/junk
*.tmp
fm:aa:something/*
re:^/home/[^/]\.tmp/
sh:/home/*/.thumbnails
EOF
$ borg create --exclude-from exclude.txt backup /
''')
  920. def do_help(self, parser, commands, args):
  921. if not args.topic:
  922. parser.print_help()
  923. elif args.topic in self.helptext:
  924. print(self.helptext[args.topic])
  925. elif args.topic in commands:
  926. if args.epilog_only:
  927. print(commands[args.topic].epilog)
  928. elif args.usage_only:
  929. commands[args.topic].epilog = None
  930. commands[args.topic].print_help()
  931. else:
  932. commands[args.topic].print_help()
  933. else:
  934. parser.error('No help available on %s' % (args.topic,))
  935. return self.exit_code
  936. def preprocess_args(self, args):
  937. deprecations = [
  938. # ('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
  939. ('--list-format', '--format', 'Warning: "--list-format" has been deprecated. Use "--format" instead.'),
  940. ]
  941. for i, arg in enumerate(args[:]):
  942. for old_name, new_name, warning in deprecations:
  943. if arg.startswith(old_name):
  944. args[i] = arg.replace(old_name, new_name)
  945. self.print_warning(warning)
  946. return args
  947. def build_parser(self, args=None, prog=None):
  948. common_parser = argparse.ArgumentParser(add_help=False, prog=prog)
  949. common_group = common_parser.add_argument_group('Common options')
  950. common_group.add_argument('-h', '--help', action='help', help='show this help message and exit')
  951. common_group.add_argument('--critical', dest='log_level',
  952. action='store_const', const='critical', default='warning',
  953. help='work on log level CRITICAL')
  954. common_group.add_argument('--error', dest='log_level',
  955. action='store_const', const='error', default='warning',
  956. help='work on log level ERROR')
  957. common_group.add_argument('--warning', dest='log_level',
  958. action='store_const', const='warning', default='warning',
  959. help='work on log level WARNING (default)')
  960. common_group.add_argument('--info', '-v', '--verbose', dest='log_level',
  961. action='store_const', const='info', default='warning',
  962. help='work on log level INFO')
  963. common_group.add_argument('--debug', dest='log_level',
  964. action='store_const', const='debug', default='warning',
  965. help='enable debug output, work on log level DEBUG')
  966. common_group.add_argument('--lock-wait', dest='lock_wait', type=int, metavar='N', default=1,
  967. help='wait for the lock, but max. N seconds (default: %(default)d).')
  968. common_group.add_argument('--show-version', dest='show_version', action='store_true', default=False,
  969. help='show/log the borg version')
  970. common_group.add_argument('--show-rc', dest='show_rc', action='store_true', default=False,
  971. help='show/log the return code (rc)')
  972. common_group.add_argument('--no-files-cache', dest='cache_files', action='store_false',
  973. help='do not load/update the file metadata cache used to detect unchanged files')
  974. common_group.add_argument('--umask', dest='umask', type=lambda s: int(s, 8), default=UMASK_DEFAULT, metavar='M',
  975. help='set umask to M (local and remote, default: %(default)04o)')
  976. common_group.add_argument('--remote-path', dest='remote_path', default='borg', metavar='PATH',
  977. help='set remote path to executable (default: "%(default)s")')
  978. parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
  979. parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,
  980. help='show version number and exit')
  981. subparsers = parser.add_subparsers(title='required arguments', metavar='<command>')
  982. serve_epilog = textwrap.dedent("""
  983. This command starts a repository server process. This command is usually not used manually.
  984. """)
  985. subparser = subparsers.add_parser('serve', parents=[common_parser], add_help=False,
  986. description=self.do_serve.__doc__, epilog=serve_epilog,
  987. formatter_class=argparse.RawDescriptionHelpFormatter,
  988. help='start repository server process')
  989. subparser.set_defaults(func=self.do_serve)
  990. subparser.add_argument('--restrict-to-path', dest='restrict_to_paths', action='append',
  991. metavar='PATH', help='restrict repository access to PATH')
  992. init_epilog = textwrap.dedent("""
  993. This command initializes an empty repository. A repository is a filesystem
  994. directory containing the deduplicated data from zero or more archives.
  995. Encryption can be enabled at repository init time.
  996. """)
  997. subparser = subparsers.add_parser('init', parents=[common_parser], add_help=False,
  998. description=self.do_init.__doc__, epilog=init_epilog,
  999. formatter_class=argparse.RawDescriptionHelpFormatter,
  1000. help='initialize empty repository')
  1001. subparser.set_defaults(func=self.do_init)
  1002. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1003. type=location_validator(archive=False),
  1004. help='repository to create')
  1005. subparser.add_argument('-e', '--encryption', dest='encryption',
  1006. choices=('none', 'keyfile', 'repokey'), default='repokey',
  1007. help='select encryption key mode (default: "%(default)s")')
  1008. check_epilog = textwrap.dedent("""
  1009. The check command verifies the consistency of a repository and the corresponding archives.
  1010. First, the underlying repository data files are checked:
  1011. - For all segments the segment magic (header) is checked
  1012. - For all objects stored in the segments, all metadata (e.g. crc and size) and
  1013. all data is read. The read data is checked by size and CRC. Bit rot and other
  1014. types of accidental damage can be detected this way.
  1015. - If we are in repair mode and a integrity error is detected for a segment,
  1016. we try to recover as many objects from the segment as possible.
  1017. - In repair mode, it makes sure that the index is consistent with the data
  1018. stored in the segments.
  1019. - If you use a remote repo server via ssh:, the repo check is executed on the
  1020. repo server without causing significant network traffic.
  1021. - The repository check can be skipped using the --archives-only option.
  1022. Second, the consistency and correctness of the archive metadata is verified:
  1023. - Is the repo manifest present? If not, it is rebuilt from archive metadata
  1024. chunks (this requires reading and decrypting of all metadata and data).
  1025. - Check if archive metadata chunk is present. if not, remove archive from
  1026. manifest.
  1027. - For all files (items) in the archive, for all chunks referenced by these
  1028. files, check if chunk is present (if not and we are in repair mode, replace
  1029. it with a same-size chunk of zeros). This requires reading of archive and
  1030. file metadata, but not data.
  1031. - If we are in repair mode and we checked all the archives: delete orphaned
  1032. chunks from the repo.
  1033. - if you use a remote repo server via ssh:, the archive check is executed on
  1034. the client machine (because if encryption is enabled, the checks will require
  1035. decryption and this is always done client-side, because key access will be
  1036. required).
  1037. - The archive checks can be time consuming, they can be skipped using the
  1038. --repository-only option.
  1039. """)
  1040. subparser = subparsers.add_parser('check', parents=[common_parser], add_help=False,
  1041. description=self.do_check.__doc__,
  1042. epilog=check_epilog,
  1043. formatter_class=argparse.RawDescriptionHelpFormatter,
  1044. help='verify repository')
  1045. subparser.set_defaults(func=self.do_check)
  1046. subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
  1047. type=location_validator(),
  1048. help='repository or archive to check consistency of')
  1049. subparser.add_argument('--repository-only', dest='repo_only', action='store_true',
  1050. default=False,
  1051. help='only perform repository checks')
  1052. subparser.add_argument('--archives-only', dest='archives_only', action='store_true',
  1053. default=False,
  1054. help='only perform archives checks')
  1055. subparser.add_argument('--repair', dest='repair', action='store_true',
  1056. default=False,
  1057. help='attempt to repair any inconsistencies found')
  1058. subparser.add_argument('--save-space', dest='save_space', action='store_true',
  1059. default=False,
  1060. help='work slower, but using less space')
  1061. subparser.add_argument('--last', dest='last',
  1062. type=int, default=None, metavar='N',
  1063. help='only check last N archives (Default: all)')
  1064. subparser.add_argument('-P', '--prefix', dest='prefix', type=str,
  1065. help='only consider archive names starting with this prefix')
  1066. change_passphrase_epilog = textwrap.dedent("""
  1067. The key files used for repository encryption are optionally passphrase
  1068. protected. This command can be used to change this passphrase.
  1069. """)
  1070. subparser = subparsers.add_parser('change-passphrase', parents=[common_parser], add_help=False,
  1071. description=self.do_change_passphrase.__doc__,
  1072. epilog=change_passphrase_epilog,
  1073. formatter_class=argparse.RawDescriptionHelpFormatter,
  1074. help='change repository passphrase')
  1075. subparser.set_defaults(func=self.do_change_passphrase)
  1076. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1077. type=location_validator(archive=False))
  1078. migrate_to_repokey_epilog = textwrap.dedent("""
  1079. This command migrates a repository from passphrase mode (not supported any
  1080. more) to repokey mode.
  1081. You will be first asked for the repository passphrase (to open it in passphrase
  1082. mode). This is the same passphrase as you used to use for this repo before 1.0.
  1083. It will then derive the different secrets from this passphrase.
  1084. Then you will be asked for a new passphrase (twice, for safety). This
  1085. passphrase will be used to protect the repokey (which contains these same
  1086. secrets in encrypted form). You may use the same passphrase as you used to
  1087. use, but you may also use a different one.
  1088. After migrating to repokey mode, you can change the passphrase at any time.
  1089. But please note: the secrets will always stay the same and they could always
  1090. be derived from your (old) passphrase-mode passphrase.
  1091. """)
  1092. subparser = subparsers.add_parser('migrate-to-repokey', parents=[common_parser], add_help=False,
  1093. description=self.do_migrate_to_repokey.__doc__,
  1094. epilog=migrate_to_repokey_epilog,
  1095. formatter_class=argparse.RawDescriptionHelpFormatter,
  1096. help='migrate passphrase-mode repository to repokey')
  1097. subparser.set_defaults(func=self.do_migrate_to_repokey)
  1098. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1099. type=location_validator(archive=False))
  1100. create_epilog = textwrap.dedent("""
  1101. This command creates a backup archive containing all files found while recursively
  1102. traversing all paths specified. The archive will consume almost no disk space for
  1103. files or parts of files that have already been stored in other archives.
  1104. To speed up pulling backups over sshfs and similar network file systems which do
  1105. not provide correct inode information the --ignore-inode flag can be used. This
  1106. potentially decreases reliability of change detection, while avoiding always reading
  1107. all files on these file systems.
  1108. See the output of the "borg help patterns" command for more help on exclude patterns.
  1109. """)
  1110. subparser = subparsers.add_parser('create', parents=[common_parser], add_help=False,
  1111. description=self.do_create.__doc__,
  1112. epilog=create_epilog,
  1113. formatter_class=argparse.RawDescriptionHelpFormatter,
  1114. help='create backup')
  1115. subparser.set_defaults(func=self.do_create)
  1116. subparser.add_argument('-n', '--dry-run', dest='dry_run',
  1117. action='store_true', default=False,
  1118. help='do not create a backup archive')
  1119. subparser.add_argument('-s', '--stats', dest='stats',
  1120. action='store_true', default=False,
  1121. help='print statistics for the created archive')
  1122. subparser.add_argument('-p', '--progress', dest='progress',
  1123. action='store_true', default=False,
  1124. help='show progress display while creating the archive, showing Original, '
  1125. 'Compressed and Deduplicated sizes, followed by the Number of files seen '
  1126. 'and the path being processed, default: %(default)s')
  1127. subparser.add_argument('--list', dest='output_list',
  1128. action='store_true', default=False,
  1129. help='output verbose list of items (files, dirs, ...)')
  1130. subparser.add_argument('--filter', dest='output_filter', metavar='STATUSCHARS',
  1131. help='only display items with the given status characters')
  1132. exclude_group = subparser.add_argument_group('Exclusion options')
  1133. exclude_group.add_argument('-e', '--exclude', dest='excludes',
  1134. type=parse_pattern, action='append',
  1135. metavar="PATTERN", help='exclude paths matching PATTERN')
  1136. exclude_group.add_argument('--exclude-from', dest='exclude_files',
  1137. type=argparse.FileType('r'), action='append',
  1138. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1139. exclude_group.add_argument('--exclude-caches', dest='exclude_caches',
  1140. action='store_true', default=False,
  1141. help='exclude directories that contain a CACHEDIR.TAG file ('
  1142. 'http://www.brynosaurus.com/cachedir/spec.html)')
  1143. exclude_group.add_argument('--exclude-if-present', dest='exclude_if_present',
  1144. metavar='FILENAME', action='append', type=str,
  1145. help='exclude directories that contain the specified file')
  1146. exclude_group.add_argument('--keep-tag-files', dest='keep_tag_files',
  1147. action='store_true', default=False,
  1148. help='keep tag files of excluded caches/directories')
  1149. fs_group = subparser.add_argument_group('Filesystem options')
  1150. fs_group.add_argument('-x', '--one-file-system', dest='one_file_system',
  1151. action='store_true', default=False,
  1152. help='stay in same file system, do not cross mount points')
  1153. fs_group.add_argument('--numeric-owner', dest='numeric_owner',
  1154. action='store_true', default=False,
  1155. help='only store numeric user and group identifiers')
  1156. fs_group.add_argument('--ignore-inode', dest='ignore_inode',
  1157. action='store_true', default=False,
  1158. help='ignore inode data in the file metadata cache used to detect unchanged files.')
  1159. fs_group.add_argument('--read-special', dest='read_special',
  1160. action='store_true', default=False,
  1161. help='open and read special files as if they were regular files')
  1162. archive_group = subparser.add_argument_group('Archive options')
  1163. archive_group.add_argument('--comment', dest='comment', metavar='COMMENT', default='',
  1164. help='add a comment text to the archive')
  1165. archive_group.add_argument('--timestamp', dest='timestamp',
  1166. type=timestamp, default=None,
  1167. metavar='yyyy-mm-ddThh:mm:ss',
  1168. help='manually specify the archive creation date/time (UTC). '
  1169. 'alternatively, give a reference file/directory.')
  1170. archive_group.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
  1171. type=int, default=300, metavar='SECONDS',
  1172. help='write checkpoint every SECONDS seconds (Default: 300)')
  1173. archive_group.add_argument('--chunker-params', dest='chunker_params',
  1174. type=ChunkerParams, default=CHUNKER_PARAMS,
  1175. metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
  1176. help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
  1177. archive_group.add_argument('-C', '--compression', dest='compression',
  1178. type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
  1179. help='select compression algorithm (and level):\n'
  1180. 'none == no compression (default),\n'
  1181. 'lz4 == lz4,\n'
  1182. 'zlib == zlib (default level 6),\n'
  1183. 'zlib,0 .. zlib,9 == zlib (with level 0..9),\n'
  1184. 'lzma == lzma (default level 6),\n'
  1185. 'lzma,0 .. lzma,9 == lzma (with level 0..9).')
  1186. subparser.add_argument('location', metavar='ARCHIVE',
  1187. type=location_validator(archive=True),
  1188. help='name of archive to create (must be also a valid directory name)')
  1189. subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
  1190. help='paths to archive')
  1191. extract_epilog = textwrap.dedent("""
  1192. This command extracts the contents of an archive. By default the entire
  1193. archive is extracted but a subset of files and directories can be selected
  1194. by passing a list of ``PATHs`` as arguments. The file selection can further
  1195. be restricted by using the ``--exclude`` option.
  1196. See the output of the "borg help patterns" command for more help on exclude patterns.
  1197. """)
  1198. subparser = subparsers.add_parser('extract', parents=[common_parser], add_help=False,
  1199. description=self.do_extract.__doc__,
  1200. epilog=extract_epilog,
  1201. formatter_class=argparse.RawDescriptionHelpFormatter,
  1202. help='extract archive contents')
  1203. subparser.set_defaults(func=self.do_extract)
  1204. subparser.add_argument('--list', dest='output_list',
  1205. action='store_true', default=False,
  1206. help='output verbose list of items (files, dirs, ...)')
  1207. subparser.add_argument('-n', '--dry-run', dest='dry_run',
  1208. default=False, action='store_true',
  1209. help='do not actually change any files')
  1210. subparser.add_argument('-e', '--exclude', dest='excludes',
  1211. type=parse_pattern, action='append',
  1212. metavar="PATTERN", help='exclude paths matching PATTERN')
  1213. subparser.add_argument('--exclude-from', dest='exclude_files',
  1214. type=argparse.FileType('r'), action='append',
  1215. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1216. subparser.add_argument('--numeric-owner', dest='numeric_owner',
  1217. action='store_true', default=False,
  1218. help='only obey numeric user and group identifiers')
  1219. subparser.add_argument('--strip-components', dest='strip_components',
  1220. type=int, default=0, metavar='NUMBER',
  1221. help='Remove the specified number of leading path elements. Pathnames with fewer elements will be silently skipped.')
  1222. subparser.add_argument('--stdout', dest='stdout',
  1223. action='store_true', default=False,
  1224. help='write all extracted data to stdout')
  1225. subparser.add_argument('--sparse', dest='sparse',
  1226. action='store_true', default=False,
  1227. help='create holes in output sparse file from all-zero chunks')
  1228. subparser.add_argument('location', metavar='ARCHIVE',
  1229. type=location_validator(archive=True),
  1230. help='archive to extract')
  1231. subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
  1232. help='paths to extract; patterns are supported')
  1233. diff_epilog = textwrap.dedent("""
  1234. This command finds differences in files (contents, user, group, mode) between archives.
  1235. Both archives need to be in the same repository, and a repository location may only
  1236. be specified for ARCHIVE1.
  1237. For archives created with Borg 1.1 or newer diff automatically detects whether
  1238. the archives are created with the same chunker params. If so, only chunk IDs
  1239. are compared, which is very fast.
  1240. For archives prior to Borg 1.1 chunk contents are compared by default.
  1241. If you did not create the archives with different chunker params,
  1242. pass --same-chunker-params.
  1243. Note that the chunker params changed from Borg 0.xx to 1.0.
  1244. See the output of the "borg help patterns" command for more help on exclude patterns.
  1245. """)
  1246. subparser = subparsers.add_parser('diff', parents=[common_parser], add_help=False,
  1247. description=self.do_diff.__doc__,
  1248. epilog=diff_epilog,
  1249. formatter_class=argparse.RawDescriptionHelpFormatter,
  1250. help='find differences in archive contents')
  1251. subparser.set_defaults(func=self.do_diff)
  1252. subparser.add_argument('-e', '--exclude', dest='excludes',
  1253. type=parse_pattern, action='append',
  1254. metavar="PATTERN", help='exclude paths matching PATTERN')
  1255. subparser.add_argument('--exclude-from', dest='exclude_files',
  1256. type=argparse.FileType('r'), action='append',
  1257. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1258. subparser.add_argument('--numeric-owner', dest='numeric_owner',
  1259. action='store_true', default=False,
  1260. help='only consider numeric user and group identifiers')
  1261. subparser.add_argument('--same-chunker-params', dest='same_chunker_params',
  1262. action='store_true', default=False,
  1263. help='Override check of chunker parameters.')
  1264. subparser.add_argument('--sort', dest='sort',
  1265. action='store_true', default=False,
  1266. help='Sort the output lines by file path.')
  1267. subparser.add_argument('location', metavar='ARCHIVE1',
  1268. type=location_validator(archive=True),
  1269. help='archive')
  1270. subparser.add_argument('archive2', metavar='ARCHIVE2',
  1271. type=archivename_validator(),
  1272. help='archive to compare with ARCHIVE1 (no repository location)')
  1273. subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
  1274. help='paths to compare; patterns are supported')
  1275. rename_epilog = textwrap.dedent("""
  1276. This command renames an archive in the repository.
  1277. This results in a different archive ID.
  1278. """)
  1279. subparser = subparsers.add_parser('rename', parents=[common_parser], add_help=False,
  1280. description=self.do_rename.__doc__,
  1281. epilog=rename_epilog,
  1282. formatter_class=argparse.RawDescriptionHelpFormatter,
  1283. help='rename archive')
  1284. subparser.set_defaults(func=self.do_rename)
  1285. subparser.add_argument('location', metavar='ARCHIVE',
  1286. type=location_validator(archive=True),
  1287. help='archive to rename')
  1288. subparser.add_argument('name', metavar='NEWNAME',
  1289. type=archivename_validator(),
  1290. help='the new archive name to use')
  1291. delete_epilog = textwrap.dedent("""
  1292. This command deletes an archive from the repository or the complete repository.
  1293. Disk space is reclaimed accordingly. If you delete the complete repository, the
  1294. local cache for it (if any) is also deleted.
  1295. """)
  1296. subparser = subparsers.add_parser('delete', parents=[common_parser], add_help=False,
  1297. description=self.do_delete.__doc__,
  1298. epilog=delete_epilog,
  1299. formatter_class=argparse.RawDescriptionHelpFormatter,
  1300. help='delete archive')
  1301. subparser.set_defaults(func=self.do_delete)
  1302. subparser.add_argument('-p', '--progress', dest='progress',
  1303. action='store_true', default=False,
  1304. help="""show progress display while deleting a single archive""")
  1305. subparser.add_argument('-s', '--stats', dest='stats',
  1306. action='store_true', default=False,
  1307. help='print statistics for the deleted archive')
  1308. subparser.add_argument('-c', '--cache-only', dest='cache_only',
  1309. action='store_true', default=False,
  1310. help='delete only the local cache for the given repository')
  1311. subparser.add_argument('--save-space', dest='save_space', action='store_true',
  1312. default=False,
  1313. help='work slower, but using less space')
  1314. subparser.add_argument('location', metavar='TARGET', nargs='?', default='',
  1315. type=location_validator(),
  1316. help='archive or repository to delete')
  1317. list_epilog = textwrap.dedent("""
  1318. This command lists the contents of a repository or an archive.
  1319. See the "borg help patterns" command for more help on exclude patterns.
  1320. The following keys are available for --format when listing files:
  1321. """) + ItemFormatter.keys_help()
  1322. subparser = subparsers.add_parser('list', parents=[common_parser], add_help=False,
  1323. description=self.do_list.__doc__,
  1324. epilog=list_epilog,
  1325. formatter_class=argparse.RawDescriptionHelpFormatter,
  1326. help='list archive or repository contents')
  1327. subparser.set_defaults(func=self.do_list)
  1328. subparser.add_argument('--short', dest='short',
  1329. action='store_true', default=False,
  1330. help='only print file/directory names, nothing else')
  1331. subparser.add_argument('--format', '--list-format', dest='format', type=str,
  1332. help="""specify format for file listing
  1333. (default: "{mode} {user:6} {group:6} {size:8d} {isomtime} {path}{extra}{NL}")""")
  1334. subparser.add_argument('-P', '--prefix', dest='prefix', type=str,
  1335. help='only consider archive names starting with this prefix')
  1336. subparser.add_argument('-e', '--exclude', dest='excludes',
  1337. type=parse_pattern, action='append',
  1338. metavar="PATTERN", help='exclude paths matching PATTERN')
  1339. subparser.add_argument('--exclude-from', dest='exclude_files',
  1340. type=argparse.FileType('r'), action='append',
  1341. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1342. subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
  1343. type=location_validator(),
  1344. help='repository/archive to list contents of')
  1345. subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
  1346. help='paths to list; patterns are supported')
  1347. mount_epilog = textwrap.dedent("""
  1348. This command mounts an archive as a FUSE filesystem. This can be useful for
  1349. browsing an archive or restoring individual files. Unless the ``--foreground``
  1350. option is given the command will run in the background until the filesystem
  1351. is ``umounted``.
  1352. The command ``borgfs`` provides a wrapper for ``borg mount``. This can also be
  1353. used in fstab entries:
  1354. ``/path/to/repo /mnt/point fuse.borgfs defaults,noauto 0 0``
  1355. To allow a regular user to use fstab entries, add the ``user`` option:
  1356. ``/path/to/repo /mnt/point fuse.borgfs defaults,noauto,user 0 0``
  1357. """)
  1358. subparser = subparsers.add_parser('mount', parents=[common_parser], add_help=False,
  1359. description=self.do_mount.__doc__,
  1360. epilog=mount_epilog,
  1361. formatter_class=argparse.RawDescriptionHelpFormatter,
  1362. help='mount repository')
  1363. subparser.set_defaults(func=self.do_mount)
  1364. subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', type=location_validator(),
  1365. help='repository/archive to mount')
  1366. subparser.add_argument('mountpoint', metavar='MOUNTPOINT', type=str,
  1367. help='where to mount filesystem')
  1368. subparser.add_argument('-f', '--foreground', dest='foreground',
  1369. action='store_true', default=False,
  1370. help='stay in foreground, do not daemonize')
  1371. subparser.add_argument('-o', dest='options', type=str,
  1372. help='Extra mount options')
  1373. info_epilog = textwrap.dedent("""
  1374. This command displays some detailed information about the specified archive.
  1375. """)
  1376. subparser = subparsers.add_parser('info', parents=[common_parser], add_help=False,
  1377. description=self.do_info.__doc__,
  1378. epilog=info_epilog,
  1379. formatter_class=argparse.RawDescriptionHelpFormatter,
  1380. help='show archive information')
  1381. subparser.set_defaults(func=self.do_info)
  1382. subparser.add_argument('location', metavar='ARCHIVE',
  1383. type=location_validator(archive=True),
  1384. help='archive to display information about')
  1385. break_lock_epilog = textwrap.dedent("""
  1386. This command breaks the repository and cache locks.
  1387. Please use carefully and only while no borg process (on any machine) is
  1388. trying to access the Cache or the Repository.
  1389. """)
  1390. subparser = subparsers.add_parser('break-lock', parents=[common_parser], add_help=False,
  1391. description=self.do_break_lock.__doc__,
  1392. epilog=break_lock_epilog,
  1393. formatter_class=argparse.RawDescriptionHelpFormatter,
  1394. help='break repository and cache locks')
  1395. subparser.set_defaults(func=self.do_break_lock)
  1396. subparser.add_argument('location', metavar='REPOSITORY',
  1397. type=location_validator(archive=False),
  1398. help='repository for which to break the locks')
  1399. prune_epilog = textwrap.dedent("""
  1400. The prune command prunes a repository by deleting archives not matching
  1401. any of the specified retention options. This command is normally used by
  1402. automated backup scripts wanting to keep a certain number of historic backups.
  1403. As an example, "-d 7" means to keep the latest backup on each day, up to 7
  1404. most recent days with backups (days without backups do not count).
  1405. The rules are applied from hourly to yearly, and backups selected by previous
  1406. rules do not count towards those of later rules. The time that each backup
  1407. completes is used for pruning purposes. Dates and times are interpreted in
  1408. the local timezone, and weeks go from Monday to Sunday. Specifying a
  1409. negative number of archives to keep means that there is no limit.
  1410. The "--keep-within" option takes an argument of the form "<int><char>",
  1411. where char is "H", "d", "w", "m", "y". For example, "--keep-within 2d" means
  1412. to keep all archives that were created within the past 48 hours.
  1413. "1m" is taken to mean "31d". The archives kept with this option do not
  1414. count towards the totals specified by any other options.
  1415. If a prefix is set with -P, then only archives that start with the prefix are
  1416. considered for deletion and only those archives count towards the totals
  1417. specified by the rules.
  1418. Otherwise, *all* archives in the repository are candidates for deletion!
  1419. """)
  1420. subparser = subparsers.add_parser('prune', parents=[common_parser], add_help=False,
  1421. description=self.do_prune.__doc__,
  1422. epilog=prune_epilog,
  1423. formatter_class=argparse.RawDescriptionHelpFormatter,
  1424. help='prune archives')
  1425. subparser.set_defaults(func=self.do_prune)
  1426. subparser.add_argument('-n', '--dry-run', dest='dry_run',
  1427. default=False, action='store_true',
  1428. help='do not change repository')
  1429. subparser.add_argument('-s', '--stats', dest='stats',
  1430. action='store_true', default=False,
  1431. help='print statistics for the deleted archive')
  1432. subparser.add_argument('--list', dest='output_list',
  1433. action='store_true', default=False,
  1434. help='output verbose list of archives it keeps/prunes')
  1435. subparser.add_argument('--keep-within', dest='within', type=str, metavar='WITHIN',
  1436. help='keep all archives within this time interval')
  1437. subparser.add_argument('-H', '--keep-hourly', dest='hourly', type=int, default=0,
  1438. help='number of hourly archives to keep')
  1439. subparser.add_argument('-d', '--keep-daily', dest='daily', type=int, default=0,
  1440. help='number of daily archives to keep')
  1441. subparser.add_argument('-w', '--keep-weekly', dest='weekly', type=int, default=0,
  1442. help='number of weekly archives to keep')
  1443. subparser.add_argument('-m', '--keep-monthly', dest='monthly', type=int, default=0,
  1444. help='number of monthly archives to keep')
  1445. subparser.add_argument('-y', '--keep-yearly', dest='yearly', type=int, default=0,
  1446. help='number of yearly archives to keep')
  1447. subparser.add_argument('-P', '--prefix', dest='prefix', type=str,
  1448. help='only consider archive names starting with this prefix')
  1449. subparser.add_argument('--save-space', dest='save_space', action='store_true',
  1450. default=False,
  1451. help='work slower, but using less space')
  1452. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1453. type=location_validator(archive=False),
  1454. help='repository to prune')
  1455. upgrade_epilog = textwrap.dedent("""
  1456. Upgrade an existing Borg repository.
  1457. This currently supports converting an Attic repository to Borg and also
  1458. helps with converting Borg 0.xx to 1.0.
  1459. Currently, only LOCAL repositories can be upgraded (issue #465).
  1460. It will change the magic strings in the repository's segments
  1461. to match the new Borg magic strings. The keyfiles found in
  1462. $ATTIC_KEYS_DIR or ~/.attic/keys/ will also be converted and
  1463. copied to $BORG_KEYS_DIR or ~/.config/borg/keys.
  1464. The cache files are converted, from $ATTIC_CACHE_DIR or
  1465. ~/.cache/attic to $BORG_CACHE_DIR or ~/.cache/borg, but the
  1466. cache layout between Borg and Attic changed, so it is possible
  1467. the first backup after the conversion takes longer than expected
  1468. due to the cache resync.
  1469. Upgrade should be able to resume if interrupted, although it
  1470. will still iterate over all segments. If you want to start
  1471. from scratch, use `borg delete` over the copied repository to
  1472. make sure the cache files are also removed:
  1473. borg delete borg
  1474. Unless ``--inplace`` is specified, the upgrade process first
  1475. creates a backup copy of the repository, in
  1476. REPOSITORY.upgrade-DATETIME, using hardlinks. This takes
  1477. longer than in place upgrades, but is much safer and gives
  1478. progress information (as opposed to ``cp -al``). Once you are
  1479. satisfied with the conversion, you can safely destroy the
  1480. backup copy.
  1481. WARNING: Running the upgrade in place will make the current
  1482. copy unusable with older version, with no way of going back
  1483. to previous versions. This can PERMANENTLY DAMAGE YOUR
  1484. REPOSITORY! Attic CAN NOT READ BORG REPOSITORIES, as the
  1485. magic strings have changed. You have been warned.""")
  1486. subparser = subparsers.add_parser('upgrade', parents=[common_parser], add_help=False,
  1487. description=self.do_upgrade.__doc__,
  1488. epilog=upgrade_epilog,
  1489. formatter_class=argparse.RawDescriptionHelpFormatter,
  1490. help='upgrade repository format')
  1491. subparser.set_defaults(func=self.do_upgrade)
  1492. subparser.add_argument('-p', '--progress', dest='progress',
  1493. action='store_true', default=False,
  1494. help="""show progress display while upgrading the repository""")
  1495. subparser.add_argument('-n', '--dry-run', dest='dry_run',
  1496. default=False, action='store_true',
  1497. help='do not change repository')
  1498. subparser.add_argument('-i', '--inplace', dest='inplace',
  1499. default=False, action='store_true',
  1500. help="""rewrite repository in place, with no chance of going back to older
  1501. versions of the repository.""")
  1502. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1503. type=location_validator(archive=False),
  1504. help='path to the repository to be upgraded')
  1505. recreate_epilog = textwrap.dedent("""
  1506. Recreate the contents of existing archives.
  1507. --exclude, --exclude-from and PATH have the exact same semantics
  1508. as in "borg create". If PATHs are specified the resulting archive
  1509. will only contain files from these PATHs.
  1510. --compression: all chunks seen will be stored using the given method.
  1511. Due to how Borg stores compressed size information this might display
  1512. incorrect information for archives that were not recreated at the same time.
  1513. There is no risk of data loss by this.
  1514. --chunker-params will re-chunk all files in the archive, this can be
  1515. used to have upgraded Borg 0.xx or Attic archives deduplicate with
  1516. Borg 1.x archives.
  1517. borg recreate is signal safe. Send either SIGINT (Ctrl-C on most terminals) or
  1518. SIGTERM to request termination.
  1519. Use the *exact same* command line to resume the operation later - changing excludes
  1520. or paths will lead to inconsistencies (changed excludes will only apply to newly
  1521. processed files/dirs). Changing compression leads to incorrect size information
  1522. (which does not cause any data loss, but can be misleading).
  1523. Changing chunker params between invocations might lead to data loss.
  1524. USE WITH CAUTION.
  1525. Depending on the PATHs and patterns given, recreate can be used to permanently
  1526. delete files from archives.
  1527. When in doubt, use "--dry-run --verbose --list" to see how patterns/PATHS are
  1528. interpreted.
  1529. The archive being recreated is only removed after the operation completes. The
  1530. archive that is built during the operation exists at the same time at
  1531. "<ARCHIVE>.recreate". The new archive will have a different archive ID.
  1532. When rechunking space usage can be substantial, expect at least the entire
  1533. deduplicated size of the archives using the previous chunker params.
  1534. When recompressing approximately 1 % of the repository size or 512 MB
  1535. (whichever is greater) of additional space is used.
  1536. """)
  1537. subparser = subparsers.add_parser('recreate', parents=[common_parser], add_help=False,
  1538. description=self.do_recreate.__doc__,
  1539. epilog=recreate_epilog,
  1540. formatter_class=argparse.RawDescriptionHelpFormatter,
  1541. help=self.do_recreate.__doc__)
  1542. subparser.set_defaults(func=self.do_recreate)
  1543. subparser.add_argument('--list', dest='output_list',
  1544. action='store_true', default=False,
  1545. help='output verbose list of items (files, dirs, ...)')
  1546. subparser.add_argument('--filter', dest='output_filter', metavar='STATUSCHARS',
  1547. help='only display items with the given status characters')
  1548. subparser.add_argument('-p', '--progress', dest='progress',
  1549. action='store_true', default=False,
  1550. help='show progress display while recreating archives')
  1551. subparser.add_argument('-n', '--dry-run', dest='dry_run',
  1552. action='store_true', default=False,
  1553. help='do not change anything')
  1554. subparser.add_argument('-s', '--stats', dest='stats',
  1555. action='store_true', default=False,
  1556. help='print statistics at end')
  1557. exclude_group = subparser.add_argument_group('Exclusion options')
  1558. exclude_group.add_argument('-e', '--exclude', dest='excludes',
  1559. type=parse_pattern, action='append',
  1560. metavar="PATTERN", help='exclude paths matching PATTERN')
  1561. exclude_group.add_argument('--exclude-from', dest='exclude_files',
  1562. type=argparse.FileType('r'), action='append',
  1563. metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
  1564. exclude_group.add_argument('--exclude-caches', dest='exclude_caches',
  1565. action='store_true', default=False,
  1566. help='exclude directories that contain a CACHEDIR.TAG file ('
  1567. 'http://www.brynosaurus.com/cachedir/spec.html)')
  1568. exclude_group.add_argument('--exclude-if-present', dest='exclude_if_present',
  1569. metavar='FILENAME', action='append', type=str,
  1570. help='exclude directories that contain the specified file')
  1571. exclude_group.add_argument('--keep-tag-files', dest='keep_tag_files',
  1572. action='store_true', default=False,
  1573. help='keep tag files of excluded caches/directories')
  1574. archive_group = subparser.add_argument_group('Archive options')
  1575. archive_group.add_argument('--comment', dest='comment', metavar='COMMENT', default=None,
  1576. help='add a comment text to the archive')
  1577. archive_group.add_argument('--timestamp', dest='timestamp',
  1578. type=timestamp, default=None,
  1579. metavar='yyyy-mm-ddThh:mm:ss',
  1580. help='manually specify the archive creation date/time (UTC). '
  1581. 'alternatively, give a reference file/directory.')
  1582. archive_group.add_argument('-C', '--compression', dest='compression',
  1583. type=CompressionSpec, default=None, metavar='COMPRESSION',
  1584. help='select compression algorithm (and level):\n'
  1585. 'none == no compression (default),\n'
  1586. 'lz4 == lz4,\n'
  1587. 'zlib == zlib (default level 6),\n'
  1588. 'zlib,0 .. zlib,9 == zlib (with level 0..9),\n'
  1589. 'lzma == lzma (default level 6),\n'
  1590. 'lzma,0 .. lzma,9 == lzma (with level 0..9).')
  1591. archive_group.add_argument('--chunker-params', dest='chunker_params',
  1592. type=ChunkerParams, default=None,
  1593. metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
  1594. help='specify the chunker parameters (or "default").')
  1595. subparser.add_argument('location', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
  1596. type=location_validator(),
  1597. help='repository/archive to recreate')
  1598. subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
  1599. help='paths to recreate; patterns are supported')
  1600. subparser = subparsers.add_parser('help', parents=[common_parser], add_help=False,
  1601. description='Extra help')
  1602. subparser.add_argument('--epilog-only', dest='epilog_only',
  1603. action='store_true', default=False)
  1604. subparser.add_argument('--usage-only', dest='usage_only',
  1605. action='store_true', default=False)
  1606. subparser.set_defaults(func=functools.partial(self.do_help, parser, subparsers.choices))
  1607. subparser.add_argument('topic', metavar='TOPIC', type=str, nargs='?',
  1608. help='additional help on TOPIC')
  1609. debug_dump_archive_items_epilog = textwrap.dedent("""
  1610. This command dumps raw (but decrypted and decompressed) archive items (only metadata) to files.
  1611. """)
  1612. subparser = subparsers.add_parser('debug-dump-archive-items', parents=[common_parser], add_help=False,
  1613. description=self.do_debug_dump_archive_items.__doc__,
  1614. epilog=debug_dump_archive_items_epilog,
  1615. formatter_class=argparse.RawDescriptionHelpFormatter,
  1616. help='dump archive items (metadata) (debug)')
  1617. subparser.set_defaults(func=self.do_debug_dump_archive_items)
  1618. subparser.add_argument('location', metavar='ARCHIVE',
  1619. type=location_validator(archive=True),
  1620. help='archive to dump')
  1621. debug_get_obj_epilog = textwrap.dedent("""
  1622. This command gets an object from the repository.
  1623. """)
  1624. subparser = subparsers.add_parser('debug-get-obj', parents=[common_parser], add_help=False,
  1625. description=self.do_debug_get_obj.__doc__,
  1626. epilog=debug_get_obj_epilog,
  1627. formatter_class=argparse.RawDescriptionHelpFormatter,
  1628. help='get object from repository (debug)')
  1629. subparser.set_defaults(func=self.do_debug_get_obj)
  1630. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1631. type=location_validator(archive=False),
  1632. help='repository to use')
  1633. subparser.add_argument('id', metavar='ID', type=str,
  1634. help='hex object ID to get from the repo')
  1635. subparser.add_argument('path', metavar='PATH', type=str,
  1636. help='file to write object data into')
  1637. debug_put_obj_epilog = textwrap.dedent("""
  1638. This command puts objects into the repository.
  1639. """)
  1640. subparser = subparsers.add_parser('debug-put-obj', parents=[common_parser], add_help=False,
  1641. description=self.do_debug_put_obj.__doc__,
  1642. epilog=debug_put_obj_epilog,
  1643. formatter_class=argparse.RawDescriptionHelpFormatter,
  1644. help='put object to repository (debug)')
  1645. subparser.set_defaults(func=self.do_debug_put_obj)
  1646. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1647. type=location_validator(archive=False),
  1648. help='repository to use')
  1649. subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
  1650. help='file(s) to read and create object(s) from')
  1651. debug_delete_obj_epilog = textwrap.dedent("""
  1652. This command deletes objects from the repository.
  1653. """)
  1654. subparser = subparsers.add_parser('debug-delete-obj', parents=[common_parser], add_help=False,
  1655. description=self.do_debug_delete_obj.__doc__,
  1656. epilog=debug_delete_obj_epilog,
  1657. formatter_class=argparse.RawDescriptionHelpFormatter,
  1658. help='delete object from repository (debug)')
  1659. subparser.set_defaults(func=self.do_debug_delete_obj)
  1660. subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
  1661. type=location_validator(archive=False),
  1662. help='repository to use')
  1663. subparser.add_argument('ids', metavar='IDs', nargs='+', type=str,
  1664. help='hex object ID(s) to delete from the repo')
  1665. return parser
  1666. def get_args(self, argv, cmd):
  1667. """usually, just returns argv, except if we deal with a ssh forced command for borg serve."""
  1668. result = self.parse_args(argv[1:])
  1669. if cmd is not None and result.func == self.do_serve:
  1670. forced_result = result
  1671. argv = shlex.split(cmd)
  1672. result = self.parse_args(argv[1:])
  1673. if result.func != forced_result.func:
  1674. # someone is trying to execute a different borg subcommand, don't do that!
  1675. return forced_result
  1676. # the only thing we take from the forced "borg serve" ssh command is --restrict-to-path
  1677. result.restrict_to_paths = forced_result.restrict_to_paths
  1678. return result
  1679. def parse_args(self, args=None):
  1680. # We can't use argparse for "serve" since we don't want it to show up in "Available commands"
  1681. if args:
  1682. args = self.preprocess_args(args)
  1683. parser = self.build_parser(args)
  1684. args = parser.parse_args(args or ['-h'])
  1685. update_excludes(args)
  1686. return args
def run(self, args):
    """Run the subcommand selected during parsing (args.func) and return its exit code.

    Performs process-wide setup first; the statement order matters:
    umask and logging must be configured before any files are opened
    or any logger is used.
    """
    os.umask(args.umask)  # early, before opening files
    self.lock_wait = args.lock_wait
    setup_logging(level=args.log_level, is_serve=args.func == self.do_serve)  # do not use loggers before this!
    if args.show_version:
        logger.info('borgbackup version %s' % __version__)
    check_extension_modules()
    if is_slow_msgpack():
        logger.warning("Using a pure-python msgpack! This will result in lower performance.")
    # dispatch to the do_* handler chosen by the argument parser
    return args.func(args)
  1697. def sig_info_handler(signum, stack): # pragma: no cover
  1698. """search the stack for infos about the currently processed file and print them"""
  1699. for frame in inspect.getouterframes(stack):
  1700. func, loc = frame[3], frame[0].f_locals
  1701. if func in ('process_file', '_process', ): # create op
  1702. path = loc['path']
  1703. try:
  1704. pos = loc['fd'].tell()
  1705. total = loc['st'].st_size
  1706. except Exception:
  1707. pos, total = 0, 0
  1708. logger.info("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
  1709. break
  1710. if func in ('extract_item', ): # extract op
  1711. path = loc['item'][b'path']
  1712. try:
  1713. pos = loc['fd'].tell()
  1714. except Exception:
  1715. pos = 0
  1716. logger.info("{0} {1}/???".format(path, format_file_size(pos)))
  1717. break
  1718. def setup_signal_handlers(): # pragma: no cover
  1719. sigs = []
  1720. if hasattr(signal, 'SIGUSR1'):
  1721. sigs.append(signal.SIGUSR1) # kill -USR1 pid
  1722. if hasattr(signal, 'SIGINFO'):
  1723. sigs.append(signal.SIGINFO) # kill -INFO pid (or ctrl-t)
  1724. for sig in sigs:
  1725. signal.signal(sig, sig_info_handler)
def main():  # pragma: no cover
    """CLI entry point: parse arguments, run the command, log the outcome, exit."""
    # provide 'borg mount' behaviour when the main script/executable is named borgfs
    if os.path.basename(sys.argv[0]) == "borgfs":
        sys.argv.insert(1, "mount")
    # Make sure stdout and stderr have errors='replace' to avoid unicode
    # issues when print()-ing unicode file names
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, sys.stderr.encoding, 'replace', line_buffering=True)
    setup_signal_handlers()
    archiver = Archiver()
    msg = None
    # SSH_ORIGINAL_COMMAND is set by sshd when a forced command is configured ("borg serve")
    args = archiver.get_args(sys.argv, os.environ.get('SSH_ORIGINAL_COMMAND'))
    try:
        exit_code = archiver.run(args)
    except Error as e:
        msg = e.get_message()
        if e.traceback:
            msg += "\n%s\n%s" % (traceback.format_exc(), sysinfo())
        exit_code = e.exit_code
    except RemoteRepository.RPCError as e:
        msg = '%s\n%s' % (str(e), sysinfo())
        exit_code = EXIT_ERROR
    except Exception:
        msg = 'Local Exception.\n%s\n%s' % (traceback.format_exc(), sysinfo())
        exit_code = EXIT_ERROR
    except KeyboardInterrupt:
        # reachable despite following "except Exception": KeyboardInterrupt
        # derives from BaseException, not Exception
        msg = 'Keyboard interrupt.\n%s\n%s' % (traceback.format_exc(), sysinfo())
        exit_code = EXIT_ERROR
    if msg:
        logger.error(msg)
    if args.show_rc:
        exit_msg = 'terminating with %s status, rc %d'
        if exit_code == EXIT_SUCCESS:
            logger.info(exit_msg % ('success', exit_code))
        elif exit_code == EXIT_WARNING:
            logger.warning(exit_msg % ('warning', exit_code))
        elif exit_code == EXIT_ERROR:
            logger.error(exit_msg % ('error', exit_code))
        else:
            # unknown rc: report as abnormal; 666 stands in for a falsy/missing code
            logger.error(exit_msg % ('abnormal', exit_code or 666))
    sys.exit(exit_code)
# run the CLI when this module is executed directly
if __name__ == '__main__':
    main()