archiver.py

from binascii import unhexlify, b2a_base64
from configparser import ConfigParser
import errno
import os
from datetime import datetime
from datetime import timedelta
from io import StringIO
import random
import stat
import subprocess
import sys
import shutil
import tempfile
import time
import unittest
from unittest.mock import patch
from hashlib import sha256

import msgpack
import pytest

from .. import xattr
from ..archive import Archive, ChunkBuffer, CHUNK_MAX_EXP, flags_noatime, flags_normal
from ..archiver import Archiver
from ..cache import Cache
from ..crypto import bytes_to_long, num_aes_blocks
from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, bin_to_hex, \
    get_security_dir
from ..key import RepoKey, KeyfileKey, Passphrase, TAMRequiredError
from ..keymanager import RepoIdMismatch, NotABorgKeyFile
from ..remote import RemoteRepository, PathNotAllowed
from ..repository import Repository
from . import BaseTestCase, changedir, environment_variable, no_selinux
from .platform import fakeroot_detected
from . import key

try:
    import llfuse
    has_llfuse = True or llfuse  # avoids "unused import"
except ImportError:
    has_llfuse = False

has_lchflags = hasattr(os, 'lchflags')

src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
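
# exec_cmd() runs one borg command line, either in-process (capturing stdout/stderr in a
# StringIO) or, with fork=True, as a subprocess of the given executable (or of
# "python -m borg.archiver" when exe is None). It returns (exit_code, output).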
def exec_cmd(*args, archiver=None, fork=False, exe=None, **kw):
    if fork:
        try:
            if exe is None:
                borg = (sys.executable, '-m', 'borg.archiver')
            elif isinstance(exe, str):
                borg = (exe, )
            elif not isinstance(exe, tuple):
                raise ValueError('exe must be None, a tuple or a str')
            output = subprocess.check_output(borg + args, stderr=subprocess.STDOUT)
            ret = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            ret = e.returncode
        return ret, os.fsdecode(output)
    else:
        stdin, stdout, stderr = sys.stdin, sys.stdout, sys.stderr
        try:
            sys.stdin = StringIO()
            sys.stdout = sys.stderr = output = StringIO()
            if archiver is None:
                archiver = Archiver()
            archiver.exit_code = EXIT_SUCCESS
            args = archiver.parse_args(list(args))
            ret = archiver.run(args)
            return ret, output.getvalue()
        finally:
            sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr


# check if the binary "borg.exe" is available (for local testing a symlink to virtualenv/bin/borg should do)
try:
    exec_cmd('help', exe='borg.exe', fork=True)
    BORG_EXES = ['python', 'binary', ]
except FileNotFoundError:
    BORG_EXES = ['python', ]
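
# Fixture parametrized over BORG_EXES: runs each test through the Python entry point and,
# if a standalone "borg.exe" binary was detected above, through that binary as well.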
@pytest.fixture(params=BORG_EXES)
def cmd(request):
    if request.param == 'python':
        exe = None
    elif request.param == 'binary':
        exe = 'borg.exe'
    else:
        raise ValueError("param must be 'python' or 'binary'")

    def exec_fn(*args, **kw):
        return exec_cmd(*args, exe=exe, fork=True, **kw)
    return exec_fn


def test_return_codes(cmd, tmpdir):
    repo = tmpdir.mkdir('repo')
    input = tmpdir.mkdir('input')
    output = tmpdir.mkdir('output')
    input.join('test_file').write('content')
    rc, out = cmd('init', '--encryption=none', '%s' % str(repo))
    assert rc == EXIT_SUCCESS
    rc, out = cmd('create', '%s::archive' % repo, str(input))
    assert rc == EXIT_SUCCESS
    with changedir(str(output)):
        rc, out = cmd('extract', '%s::archive' % repo)
        assert rc == EXIT_SUCCESS
    rc, out = cmd('extract', '%s::archive' % repo, 'does/not/match')
    assert rc == EXIT_WARNING  # pattern did not match
    rc, out = cmd('create', '%s::archive' % repo, str(input))
    assert rc == EXIT_ERROR  # duplicate archive name

  102. """
  103. test_disk_full is very slow and not recommended to be included in daily testing.
  104. for this test, an empty, writable 16MB filesystem mounted on DF_MOUNT is required.
  105. for speed and other reasons, it is recommended that the underlying block device is
  106. in RAM, not a magnetic or flash disk.
  107. assuming /tmp is a tmpfs (in memory filesystem), one can use this:
  108. dd if=/dev/zero of=/tmp/borg-disk bs=16M count=1
  109. mkfs.ext4 /tmp/borg-disk
  110. mkdir /tmp/borg-mount
  111. sudo mount /tmp/borg-disk /tmp/borg-mount
  112. if the directory does not exist, the test will be skipped.
  113. """
DF_MOUNT = '/tmp/borg-mount'


@pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT)
def test_disk_full(cmd):
    def make_files(dir, count, size, rnd=True):
        shutil.rmtree(dir, ignore_errors=True)
        os.mkdir(dir)
        if rnd:
            count = random.randint(1, count)
            if size > 1:
                size = random.randint(1, size)
        for i in range(count):
            fn = os.path.join(dir, "file%03d" % i)
            with open(fn, 'wb') as f:
                data = os.urandom(size)
                f.write(data)

    with environment_variable(BORG_CHECK_I_KNOW_WHAT_I_AM_DOING='YES'):
        mount = DF_MOUNT
        assert os.path.exists(mount)
        repo = os.path.join(mount, 'repo')
        input = os.path.join(mount, 'input')
        reserve = os.path.join(mount, 'reserve')
        for j in range(100):
            shutil.rmtree(repo, ignore_errors=True)
            shutil.rmtree(input, ignore_errors=True)
            # keep some space and some inodes in reserve that we can free up later:
            make_files(reserve, 80, 100000, rnd=False)
            rc, out = cmd('init', repo)
            if rc != EXIT_SUCCESS:
                print('init', rc, out)
            assert rc == EXIT_SUCCESS
            try:
                success, i = True, 0
                while success:
                    i += 1
                    try:
                        make_files(input, 20, 200000)
                    except OSError as err:
                        if err.errno == errno.ENOSPC:
                            # already out of space
                            break
                        raise
                    try:
                        rc, out = cmd('create', '%s::test%03d' % (repo, i), input)
                        success = rc == EXIT_SUCCESS
                        if not success:
                            print('create', rc, out)
                    finally:
                        # make sure repo is not locked
                        shutil.rmtree(os.path.join(repo, 'lock.exclusive'), ignore_errors=True)
                        os.remove(os.path.join(repo, 'lock.roster'))
            finally:
                # now some error happened, likely we are out of disk space.
                # free some space so we can expect borg to be able to work normally:
                shutil.rmtree(reserve, ignore_errors=True)
            rc, out = cmd('list', repo)
            if rc != EXIT_SUCCESS:
                print('list', rc, out)
            rc, out = cmd('check', '--repair', repo)
            if rc != EXIT_SUCCESS:
                print('check', rc, out)
            assert rc == EXIT_SUCCESS
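
# Common scaffolding for the archiver test cases: creates a throw-away temp dir with
# repository/input/output/keys/cache paths, points the BORG_* environment variables at it,
# and provides a cmd() helper that runs a borg command and asserts its exit code.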
class ArchiverTestCaseBase(BaseTestCase):
    EXE = None  # python source based
    FORK_DEFAULT = False
    prefix = ''

    def setUp(self):
        os.environ['BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'] = 'YES'
        os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES'
        os.environ['BORG_PASSPHRASE'] = 'waytooeasyonlyfortests'
        self.archiver = not self.FORK_DEFAULT and Archiver() or None
        self.tmpdir = tempfile.mkdtemp()
        self.repository_path = os.path.join(self.tmpdir, 'repository')
        self.repository_location = self.prefix + self.repository_path
        self.input_path = os.path.join(self.tmpdir, 'input')
        self.output_path = os.path.join(self.tmpdir, 'output')
        self.keys_path = os.path.join(self.tmpdir, 'keys')
        self.cache_path = os.path.join(self.tmpdir, 'cache')
        self.exclude_file_path = os.path.join(self.tmpdir, 'excludes')
        os.environ['BORG_KEYS_DIR'] = self.keys_path
        os.environ['BORG_CACHE_DIR'] = self.cache_path
        os.mkdir(self.input_path)
        os.chmod(self.input_path, 0o777)  # avoid troubles with fakeroot / FUSE
        os.mkdir(self.output_path)
        os.mkdir(self.keys_path)
        os.mkdir(self.cache_path)
        with open(self.exclude_file_path, 'wb') as fd:
            fd.write(b'input/file2\n# A comment line, then a blank line\n\n')
        self._old_wd = os.getcwd()
        os.chdir(self.tmpdir)

    def tearDown(self):
        os.chdir(self._old_wd)
        # note: ignore_errors=True as workaround for issue #862
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def cmd(self, *args, **kw):
        exit_code = kw.pop('exit_code', 0)
        fork = kw.pop('fork', None)
        if fork is None:
            fork = self.FORK_DEFAULT
        ret, output = exec_cmd(*args, fork=fork, exe=self.EXE, archiver=self.archiver, **kw)
        if ret != exit_code:
            print(output)
        self.assert_equal(ret, exit_code)
        return output

    def create_src_archive(self, name):
        self.cmd('create', self.repository_location + '::' + name, src_dir)

    def open_archive(self, name):
        repository = Repository(self.repository_path, exclusive=True)
        with repository:
            manifest, key = Manifest.load(repository)
            archive = Archive(repository, key, manifest, name)
        return archive, repository
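
# End-to-end tests that drive the command line interface (init, create, extract, list,
# prune, ...) against a local repository.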
class ArchiverTestCase(ArchiverTestCaseBase):
    def create_regular_file(self, name, size=0, contents=None):
        filename = os.path.join(self.input_path, name)
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        with open(filename, 'wb') as fd:
            if contents is None:
                contents = b'X' * size
            fd.write(contents)

    def create_test_files(self):
        """Create a minimal test case including all supported file types
        """
        # File
        self.create_regular_file('empty', size=0)
        # next code line raises OverflowError on 32bit cpu (raspberry pi 2):
        # 2600-01-01 > 2**64 ns
        # os.utime('input/empty', (19880895600, 19880895600))
        # thus, we better test with something not that far in the future:
        # 2038-01-19 (1970 + 2^31 - 1 seconds) is the 32bit "deadline":
        os.utime('input/empty', (2**31 - 1, 2**31 - 1))
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('flagfile', size=1024)
        # Directory
        self.create_regular_file('dir2/file2', size=1024 * 80)
        # File mode
        os.chmod('input/file1', 0o4755)
        # Hard link
        os.link(os.path.join(self.input_path, 'file1'),
                os.path.join(self.input_path, 'hardlink'))
        # Symlink
        os.symlink('somewhere', os.path.join(self.input_path, 'link1'))
        self.create_regular_file('fusexattr', size=1)
        if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path):
            # ironically, due to the way fakeroot works, comparing FUSE file xattrs to orig file xattrs
            # will FAIL if fakeroot supports xattrs, thus we only set the xattr if XATTR_FAKEROOT is False.
            # This is because fakeroot with xattr-support does not propagate xattrs of the underlying file
            # into "fakeroot space". Because the xattrs exposed by borgfs are those of an underlying file
            # (from fakeroot's point of view) they are invisible to the test process inside the fakeroot.
            xattr.setxattr(os.path.join(self.input_path, 'fusexattr'), 'user.foo', b'bar')
            # XXX this always fails for me
            # ubuntu 14.04, on a TMP dir filesystem with user_xattr, using fakeroot
            # same for newer ubuntu and centos.
            # if this is supported just on a specific platform, the platform should be checked first,
            # so that the test setup for all tests using it does not always fail here for others.
            # xattr.setxattr(os.path.join(self.input_path, 'link1'), 'user.foo_symlink', b'bar_symlink', follow_symlinks=False)
        # FIFO node
        os.mkfifo(os.path.join(self.input_path, 'fifo1'))
        if has_lchflags:
            os.lchflags(os.path.join(self.input_path, 'flagfile'), stat.UF_NODUMP)
        try:
            # Block device
            os.mknod('input/bdev', 0o600 | stat.S_IFBLK, os.makedev(10, 20))
            # Char device
            os.mknod('input/cdev', 0o600 | stat.S_IFCHR, os.makedev(30, 40))
            # File mode
            os.chmod('input/dir2', 0o555)  # if we take away write perms, we need root to remove contents
            # File owner
            os.chown('input/file1', 100, 200)  # raises OSError invalid argument on cygwin
            have_root = True  # we have (fake)root
        except PermissionError:
            have_root = False
        except OSError as e:
            # Note: ENOSYS "Function not implemented" happens as non-root on Win 10 Linux Subsystem.
            if e.errno not in (errno.EINVAL, errno.ENOSYS):
                raise
            have_root = False
        return have_root

    def test_basic_functionality(self):
        have_root = self.create_test_files()
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        self.cmd('create', '--stats', self.repository_location + '::test.2', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        list_output = self.cmd('list', '--short', self.repository_location)
        self.assert_in('test', list_output)
        self.assert_in('test.2', list_output)
        expected = [
            'input',
            'input/bdev',
            'input/cdev',
            'input/dir2',
            'input/dir2/file2',
            'input/empty',
            'input/fifo1',
            'input/file1',
            'input/flagfile',
            'input/hardlink',
            'input/link1',
        ]
        if not have_root:
            # we could not create these device files without (fake)root
            expected.remove('input/bdev')
            expected.remove('input/cdev')
        if has_lchflags:
            # remove the file we did not backup, so input and output become equal
            expected.remove('input/flagfile')  # this file is UF_NODUMP
            os.remove(os.path.join('input', 'flagfile'))
        list_output = self.cmd('list', '--short', self.repository_location + '::test')
        for name in expected:
            self.assert_in(name, list_output)
        self.assert_dirs_equal('input', 'output/input')
        info_output = self.cmd('info', self.repository_location + '::test')
        item_count = 4 if has_lchflags else 5  # one file is UF_NODUMP
        self.assert_in('Number of files: %d' % item_count, info_output)
        shutil.rmtree(self.cache_path)
        with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK='yes'):
            info_output2 = self.cmd('info', self.repository_location + '::test')

        def filter(output):
            # filter for interesting "info" output, ignore cache rebuilding related stuff
            prefixes = ['Name:', 'Fingerprint:', 'Number of files:', 'This archive:',
                        'All archives:', 'Chunk index:', ]
            result = []
            for line in output.splitlines():
                for prefix in prefixes:
                    if line.startswith(prefix):
                        result.append(line)
            return '\n'.join(result)

        # the interesting parts of info_output2 and info_output should be the same
        self.assert_equal(filter(info_output), filter(info_output2))

    # Search for O_NOATIME there: https://www.gnu.org/software/hurd/contributing.html - we just
    # skip the test on Hurd, it is not critical anyway, just testing a performance optimization.
    @pytest.mark.skipif(sys.platform == 'gnu0', reason="O_NOATIME is strangely broken on GNU Hurd")
    def test_atime(self):
        def has_noatime(some_file):
            atime_before = os.stat(some_file).st_atime_ns
            try:
                os.close(os.open(some_file, flags_noatime))
            except PermissionError:
                return False
            else:
                atime_after = os.stat(some_file).st_atime_ns
                noatime_used = flags_noatime != flags_normal
                return noatime_used and atime_before == atime_after

        self.create_test_files()
        atime, mtime = 123456780, 234567890
        have_noatime = has_noatime('input/file1')
        os.utime('input/file1', (atime, mtime))
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        sti = os.stat('input/file1')
        sto = os.stat('output/input/file1')
        assert sti.st_mtime_ns == sto.st_mtime_ns == mtime * 1e9
        if have_noatime:
            assert sti.st_atime_ns == sto.st_atime_ns == atime * 1e9
        else:
            # it touched the input file's atime while backing it up
            assert sto.st_atime_ns == atime * 1e9

    def _extract_repository_id(self, path):
        with Repository(self.repository_path) as repository:
            return repository.id

    def _set_repository_id(self, path, id):
        config = ConfigParser(interpolation=None)
        config.read(os.path.join(path, 'config'))
        config.set('repository', 'id', bin_to_hex(id))
        with open(os.path.join(path, 'config'), 'w') as fd:
            config.write(fd)
        with Repository(self.repository_path) as repository:
            return repository.id

    def test_sparse_file(self):
        def is_sparse(fn, total_size, hole_size):
            st = os.stat(fn)
            assert st.st_size == total_size
            sparse = True
            if sparse and hasattr(st, 'st_blocks') and st.st_blocks * 512 >= st.st_size:
                sparse = False
            if sparse and hasattr(os, 'SEEK_HOLE') and hasattr(os, 'SEEK_DATA'):
                with open(fn, 'rb') as fd:
                    # only check if the first hole is as expected, because the 2nd hole check
                    # is problematic on xfs due to its "dynamic speculative EOF preallocation"
                    try:
                        if fd.seek(0, os.SEEK_HOLE) != 0:
                            sparse = False
                        if fd.seek(0, os.SEEK_DATA) != hole_size:
                            sparse = False
                    except OSError:
                        # OS/FS does not really support SEEK_HOLE/SEEK_DATA
                        sparse = False
            return sparse

        filename = os.path.join(self.input_path, 'sparse')
        content = b'foobar'
        hole_size = 5 * (1 << CHUNK_MAX_EXP)  # 5 full chunker buffers
        total_size = hole_size + len(content) + hole_size
        with open(filename, 'wb') as fd:
            # create a file that has a hole at the beginning and end (if the
            # OS and filesystem supports sparse files)
            fd.seek(hole_size, 1)
            fd.write(content)
            fd.seek(hole_size, 1)
            pos = fd.tell()
            fd.truncate(pos)
        # we first check if we could create a sparse input file:
        sparse_support = is_sparse(filename, total_size, hole_size)
        if sparse_support:
            # we could create a sparse input file, so creating a backup of it and
            # extracting it again (as sparse) should also work:
            self.cmd('init', self.repository_location)
            self.cmd('create', self.repository_location + '::test', 'input')
            with changedir(self.output_path):
                self.cmd('extract', '--sparse', self.repository_location + '::test')
            self.assert_dirs_equal('input', 'output/input')
            filename = os.path.join(self.output_path, 'input', 'sparse')
            with open(filename, 'rb') as fd:
                # check if file contents are as expected
                self.assert_equal(fd.read(hole_size), b'\0' * hole_size)
                self.assert_equal(fd.read(len(content)), content)
                self.assert_equal(fd.read(hole_size), b'\0' * hole_size)
            self.assert_true(is_sparse(filename, total_size, hole_size))

    def test_unusual_filenames(self):
        filenames = ['normal', 'with some blanks', '(with_parens)', ]
        for filename in filenames:
            filename = os.path.join(self.input_path, filename)
            with open(filename, 'wb'):
                pass
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        for filename in filenames:
            with changedir('output'):
                self.cmd('extract', self.repository_location + '::test', os.path.join('input', filename))
            assert os.path.exists(os.path.join('output', 'input', filename))

    def test_repository_swap_detection(self):
        self.create_test_files()
        os.environ['BORG_PASSPHRASE'] = 'passphrase'
        self.cmd('init', '--encryption=repokey', self.repository_location)
        repository_id = self._extract_repository_id(self.repository_path)
        self.cmd('create', self.repository_location + '::test', 'input')
        shutil.rmtree(self.repository_path)
        self.cmd('init', '--encryption=none', self.repository_location)
        self._set_repository_id(self.repository_path, repository_id)
        self.assert_equal(repository_id, self._extract_repository_id(self.repository_path))
        if self.FORK_DEFAULT:
            self.cmd('create', self.repository_location + '::test.2', 'input', exit_code=EXIT_ERROR)
        else:
            self.assert_raises(Cache.EncryptionMethodMismatch, lambda: self.cmd('create', self.repository_location + '::test.2', 'input'))

    def test_repository_swap_detection2(self):
        self.create_test_files()
        self.cmd('init', '--encryption=none', self.repository_location + '_unencrypted')
        os.environ['BORG_PASSPHRASE'] = 'passphrase'
        self.cmd('init', '--encryption=repokey', self.repository_location + '_encrypted')
        self.cmd('create', self.repository_location + '_encrypted::test', 'input')
        shutil.rmtree(self.repository_path + '_encrypted')
        os.rename(self.repository_path + '_unencrypted', self.repository_path + '_encrypted')
        if self.FORK_DEFAULT:
            self.cmd('create', self.repository_location + '_encrypted::test.2', 'input', exit_code=EXIT_ERROR)
        else:
            self.assert_raises(Cache.RepositoryAccessAborted, lambda: self.cmd('create', self.repository_location + '_encrypted::test.2', 'input'))

    def test_strip_components(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('dir/file')
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test', '--strip-components', '3')
            self.assert_true(not os.path.exists('file'))
            with self.assert_creates_file('file'):
                self.cmd('extract', self.repository_location + '::test', '--strip-components', '2')
            with self.assert_creates_file('dir/file'):
                self.cmd('extract', self.repository_location + '::test', '--strip-components', '1')
            with self.assert_creates_file('input/dir/file'):
                self.cmd('extract', self.repository_location + '::test', '--strip-components', '0')

    def test_extract_include_exclude(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        self.create_regular_file('file3', size=1024 * 80)
        self.create_regular_file('file4', size=1024 * 80)
        self.cmd('create', '--exclude=input/file4', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test', 'input/file1', )
        self.assert_equal(sorted(os.listdir('output/input')), ['file1'])
        with changedir('output'):
            self.cmd('extract', '--exclude=input/file2', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file3'])
        with changedir('output'):
            self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file3'])

    def test_extract_include_exclude_regex(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        self.create_regular_file('file3', size=1024 * 80)
        self.create_regular_file('file4', size=1024 * 80)
        self.create_regular_file('file333', size=1024 * 80)

        # Create with regular expression exclusion for file4
        self.cmd('create', '--exclude=re:input/file4$', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2', 'file3', 'file333'])
        shutil.rmtree('output/input')

        # Extract with regular expression exclusion
        with changedir('output'):
            self.cmd('extract', '--exclude=re:file3+', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2'])
        shutil.rmtree('output/input')

        # Combine --exclude with fnmatch and regular expression
        with changedir('output'):
            self.cmd('extract', '--exclude=input/file2', '--exclude=re:file[01]', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file3', 'file333'])
        shutil.rmtree('output/input')

        # Combine --exclude-from and regular expression exclusion
        with changedir('output'):
            self.cmd('extract', '--exclude-from=' + self.exclude_file_path, '--exclude=re:file1',
                     '--exclude=re:file(\\d)\\1\\1$', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file3'])

    def test_extract_include_exclude_regex_from_file(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        self.create_regular_file('file3', size=1024 * 80)
        self.create_regular_file('file4', size=1024 * 80)
        self.create_regular_file('file333', size=1024 * 80)
        self.create_regular_file('aa:something', size=1024 * 80)

        # Create while excluding using mixed pattern styles
        with open(self.exclude_file_path, 'wb') as fd:
            fd.write(b're:input/file4$\n')
            fd.write(b'fm:*aa:*thing\n')
        self.cmd('create', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2', 'file3', 'file333'])
        shutil.rmtree('output/input')

        # Exclude using regular expression
        with open(self.exclude_file_path, 'wb') as fd:
            fd.write(b're:file3+\n')
        with changedir('output'):
            self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file2'])
        shutil.rmtree('output/input')

        # Mixed exclude pattern styles
        with open(self.exclude_file_path, 'wb') as fd:
            fd.write(b're:file(\\d)\\1\\1$\n')
            fd.write(b'fm:nothingwillmatchthis\n')
            fd.write(b'*/file1\n')
            fd.write(b're:file2$\n')
        with changedir('output'):
            self.cmd('extract', '--exclude-from=' + self.exclude_file_path, self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file3'])

    def test_extract_with_pattern(self):
        self.cmd("init", self.repository_location)
        self.create_regular_file("file1", size=1024 * 80)
        self.create_regular_file("file2", size=1024 * 80)
        self.create_regular_file("file3", size=1024 * 80)
        self.create_regular_file("file4", size=1024 * 80)
        self.create_regular_file("file333", size=1024 * 80)
        self.cmd("create", self.repository_location + "::test", "input")

        # Extract everything with regular expression
        with changedir("output"):
            self.cmd("extract", self.repository_location + "::test", "re:.*")
        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file3", "file333", "file4"])
        shutil.rmtree("output/input")

        # Extract with pattern while also excluding files
        with changedir("output"):
            self.cmd("extract", "--exclude=re:file[34]$", self.repository_location + "::test", r"re:file\d$")
        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2"])
        shutil.rmtree("output/input")

        # Combine --exclude with pattern for extraction
        with changedir("output"):
            self.cmd("extract", "--exclude=input/file1", self.repository_location + "::test", "re:file[12]$")
        self.assert_equal(sorted(os.listdir("output/input")), ["file2"])
        shutil.rmtree("output/input")

        # Multiple patterns
        with changedir("output"):
            self.cmd("extract", self.repository_location + "::test", "fm:input/file1", "fm:*file33*", "input/file2")
        self.assert_equal(sorted(os.listdir("output/input")), ["file1", "file2", "file333"])

    def test_create_without_root(self):
        """test create without a root"""
        self.cmd('init', self.repository_location)
        args = ['create', self.repository_location + '::test']
        if self.FORK_DEFAULT:
            output = self.cmd(*args, exit_code=2)
        else:
            self.assert_raises(SystemExit, lambda: self.cmd(*args))

    def test_create_pattern_root(self):
        """test create with only a root pattern"""
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        output = self.cmd('create', '-v', '--list', '--pattern=R input', self.repository_location + '::test')
        self.assert_in("A input/file1", output)
        self.assert_in("A input/file2", output)

    def test_create_pattern(self):
        """test file patterns during create"""
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        self.create_regular_file('file_important', size=1024 * 80)
        output = self.cmd('create', '-v', '--list',
                          '--pattern=+input/file_important', '--pattern=-input/file*',
                          self.repository_location + '::test', 'input')
        self.assert_in("A input/file_important", output)
        self.assert_not_in('file1', output)
        self.assert_not_in('file2', output)

    def test_extract_pattern_opt(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        self.create_regular_file('file_important', size=1024 * 80)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract',
                     '--pattern=+input/file_important', '--pattern=-input/file*',
                     self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file_important'])

    def test_exclude_caches(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('cache1/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
        self.create_regular_file('cache2/CACHEDIR.TAG', contents=b'invalid signature')
        self.cmd('create', '--exclude-caches', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['cache2', 'file1'])
        self.assert_equal(sorted(os.listdir('output/input/cache2')), ['CACHEDIR.TAG'])

    def test_exclude_tagged(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('tagged1/.NOBACKUP')
        self.create_regular_file('tagged2/00-NOBACKUP')
        self.create_regular_file('tagged3/.NOBACKUP/file2')
        self.cmd('create', '--exclude-if-present', '.NOBACKUP', '--exclude-if-present', '00-NOBACKUP', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'tagged3'])

    def test_exclude_keep_tagged(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file0', size=1024)
        self.create_regular_file('tagged1/.NOBACKUP1')
        self.create_regular_file('tagged1/file1', size=1024)
        self.create_regular_file('tagged2/.NOBACKUP2')
        self.create_regular_file('tagged2/file2', size=1024)
        self.create_regular_file('tagged3/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
        self.create_regular_file('tagged3/file3', size=1024)
        self.create_regular_file('taggedall/.NOBACKUP1')
        self.create_regular_file('taggedall/.NOBACKUP2')
        self.create_regular_file('taggedall/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
        self.create_regular_file('taggedall/file4', size=1024)
        self.cmd('create', '--exclude-if-present', '.NOBACKUP1', '--exclude-if-present', '.NOBACKUP2',
                 '--exclude-caches', '--keep-tag-files', self.repository_location + '::test', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_equal(sorted(os.listdir('output/input')), ['file0', 'tagged1', 'tagged2', 'tagged3', 'taggedall'])
        self.assert_equal(os.listdir('output/input/tagged1'), ['.NOBACKUP1'])
        self.assert_equal(os.listdir('output/input/tagged2'), ['.NOBACKUP2'])
        self.assert_equal(os.listdir('output/input/tagged3'), ['CACHEDIR.TAG'])
        self.assert_equal(sorted(os.listdir('output/input/taggedall')),
                          ['.NOBACKUP1', '.NOBACKUP2', 'CACHEDIR.TAG', ])

    @pytest.mark.skipif(not xattr.XATTR_FAKEROOT, reason='Linux capabilities test, requires fakeroot >= 1.20.2')
    def test_extract_capabilities(self):
        fchown = os.fchown

        # We need to manually patch chown to get the behaviour Linux has, since fakeroot does not
        # accurately model the interaction of chown(2) and Linux capabilities, i.e. it does not remove them.
        def patched_fchown(fd, uid, gid):
            xattr.setxattr(fd, 'security.capability', None, follow_symlinks=False)
            fchown(fd, uid, gid)

        # The capability descriptor used here is valid and taken from a /usr/bin/ping
        capabilities = b'\x01\x00\x00\x02\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        self.create_regular_file('file')
        xattr.setxattr('input/file', 'security.capability', capabilities)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            with patch.object(os, 'fchown', patched_fchown):
                self.cmd('extract', self.repository_location + '::test')
            assert xattr.getxattr('input/file', 'security.capability') == capabilities

    def test_path_normalization(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('dir1/dir2/file', size=1024 * 80)
        with changedir('input/dir1/dir2'):
            self.cmd('create', self.repository_location + '::test', '../../../input/dir1/../dir1/dir2/..')
        output = self.cmd('list', self.repository_location + '::test')
        self.assert_not_in('..', output)
        self.assert_in(' input/dir1/dir2/file', output)

    def test_exclude_normalization(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('file2', size=1024 * 80)
        with changedir('input'):
            self.cmd('create', '--exclude=file1', self.repository_location + '::test1', '.')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test1')
        self.assert_equal(sorted(os.listdir('output')), ['file2'])
        with changedir('input'):
            self.cmd('create', '--exclude=./file1', self.repository_location + '::test2', '.')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test2')
        self.assert_equal(sorted(os.listdir('output')), ['file2'])
        self.cmd('create', '--exclude=input/./file1', self.repository_location + '::test3', 'input')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test3')
        self.assert_equal(sorted(os.listdir('output/input')), ['file2'])

    def test_repeated_files(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input', 'input')

    def test_overwrite(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('dir2/file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        # Overwriting regular files and directories should be supported
        os.mkdir('output/input')
        os.mkdir('output/input/file1')
        os.mkdir('output/input/dir2')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test')
        self.assert_dirs_equal('input', 'output/input')
        # But non-empty dirs should fail
        os.unlink('output/input/file1')
        os.mkdir('output/input/file1')
        os.mkdir('output/input/file1/dir')
        with changedir('output'):
            self.cmd('extract', self.repository_location + '::test', exit_code=1)

    def test_rename(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('dir2/file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        self.cmd('create', self.repository_location + '::test.2', 'input')
        self.cmd('extract', '--dry-run', self.repository_location + '::test')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
        self.cmd('rename', self.repository_location + '::test', 'test.3')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
        self.cmd('rename', self.repository_location + '::test.2', 'test.4')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.3')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.4')
        # Make sure both archives have been renamed
        with Repository(self.repository_path) as repository:
            manifest, key = Manifest.load(repository)
        self.assert_equal(len(manifest.archives), 2)
        self.assert_in('test.3', manifest.archives)
        self.assert_in('test.4', manifest.archives)

    def test_delete(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('dir2/file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        self.cmd('create', self.repository_location + '::test.2', 'input')
        self.cmd('extract', '--dry-run', self.repository_location + '::test')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
        self.cmd('delete', self.repository_location + '::test')
        self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
        self.cmd('delete', '--stats', self.repository_location + '::test.2')
        # Make sure all data except the manifest has been deleted
        with Repository(self.repository_path) as repository:
            self.assert_equal(len(repository), 1)

    def test_delete_repo(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.create_regular_file('dir2/file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        self.cmd('create', self.repository_location + '::test.2', 'input')
        os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'no'
        self.cmd('delete', self.repository_location, exit_code=2)
        assert os.path.exists(self.repository_path)
        os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES'
        self.cmd('delete', self.repository_location)
        # Make sure the repo is gone
        self.assertFalse(os.path.exists(self.repository_path))

    def test_corrupted_repository(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('test')
        self.cmd('extract', '--dry-run', self.repository_location + '::test')
        self.cmd('check', self.repository_location)
        name = sorted(os.listdir(os.path.join(self.tmpdir, 'repository', 'data', '0')), reverse=True)[0]
        with open(os.path.join(self.tmpdir, 'repository', 'data', '0', name), 'r+b') as fd:
            fd.seek(100)
            fd.write(b'XXXX')
        self.cmd('check', self.repository_location, exit_code=1)

    # we currently need to be able to create a lock directory inside the repo:
    @pytest.mark.xfail(reason="we need to be able to create the lock directory inside the repo")
    def test_readonly_repository(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('test')
        os.system('chmod -R ugo-w ' + self.repository_path)
        try:
            self.cmd('extract', '--dry-run', self.repository_location + '::test')
        finally:
            # Restore permissions so shutil.rmtree is able to delete it
            os.system('chmod -R u+w ' + self.repository_path)

    def test_umask(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        mode = os.stat(self.repository_path).st_mode
        self.assertEqual(stat.S_IMODE(mode), 0o700)

    def test_create_dry_run(self):
        self.cmd('init', self.repository_location)
        self.cmd('create', '--dry-run', self.repository_location + '::test', 'input')
        # Make sure no archive has been created
        with Repository(self.repository_path) as repository:
            manifest, key = Manifest.load(repository)
        self.assert_equal(len(manifest.archives), 0)

    def test_progress(self):
        self.create_regular_file('file1', size=1024 * 80)
        self.cmd('init', self.repository_location)
        # progress forced on
        output = self.cmd('create', '--progress', self.repository_location + '::test4', 'input')
        self.assert_in("\r", output)
        # progress forced off
        output = self.cmd('create', self.repository_location + '::test5', 'input')
        self.assert_not_in("\r", output)

    def test_file_status(self):
        """test that various file statuses show expected results

        clearly incomplete: only tests for the weird "unchanged" status for now"""
        now = time.time()
        self.create_regular_file('file1', size=1024 * 80)
        os.utime('input/file1', (now - 5, now - 5))  # 5 seconds ago
        self.create_regular_file('file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        output = self.cmd('create', '-v', '--list', self.repository_location + '::test', 'input')
        self.assert_in("A input/file1", output)
        self.assert_in("A input/file2", output)
        # should find first file as unmodified
        output = self.cmd('create', '-v', '--list', self.repository_location + '::test1', 'input')
        self.assert_in("U input/file1", output)
        # this is expected, although surprising, for why, see:
        # https://borgbackup.readthedocs.org/en/latest/faq.html#i-am-seeing-a-added-status-for-a-unchanged-file
        self.assert_in("A input/file2", output)

    def test_create_topical(self):
        now = time.time()
        self.create_regular_file('file1', size=1024 * 80)
        os.utime('input/file1', (now - 5, now - 5))
        self.create_regular_file('file2', size=1024 * 80)
        self.cmd('init', self.repository_location)
        # no listing by default
        output = self.cmd('create', self.repository_location + '::test', 'input')
        self.assert_not_in('file1', output)
        # shouldn't be listed even if unchanged
        output = self.cmd('create', self.repository_location + '::test0', 'input')
        self.assert_not_in('file1', output)
        # should list the file as unchanged
        output = self.cmd('create', '-v', '--list', '--filter=U', self.repository_location + '::test1', 'input')
        self.assert_in('file1', output)
        # should *not* list the file as changed
        output = self.cmd('create', '-v', '--filter=AM', self.repository_location + '::test2', 'input')
        self.assert_not_in('file1', output)
        # change the file
        self.create_regular_file('file1', size=1024 * 100)
        # should list the file as changed
        output = self.cmd('create', '-v', '--list', '--filter=AM', self.repository_location + '::test3', 'input')
        self.assert_in('file1', output)

    def test_create_read_special_broken_symlink(self):
        os.symlink('somewhere doesnt exist', os.path.join(self.input_path, 'link'))
        self.cmd('init', self.repository_location)
        archive = self.repository_location + '::test'
        self.cmd('create', '--read-special', archive, 'input')
        output = self.cmd('list', archive)
        assert 'input/link -> somewhere doesnt exist' in output

    # def test_cmdline_compatibility(self):
    #     self.create_regular_file('file1', size=1024 * 80)
    #     self.cmd('init', self.repository_location)
    #     self.cmd('create', self.repository_location + '::test', 'input')
    #     output = self.cmd('foo', self.repository_location, '--old')
    #     self.assert_in('"--old" has been deprecated. Use "--new" instead', output)

    def test_prune_repository(self):
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test1', src_dir)
        self.cmd('create', self.repository_location + '::test2', src_dir)
        # these are not really checkpoints, but they look like some:
        self.cmd('create', self.repository_location + '::test3.checkpoint', src_dir)
        self.cmd('create', self.repository_location + '::test3.checkpoint.1', src_dir)
        output = self.cmd('prune', '-v', '--list', '--dry-run', self.repository_location, '--keep-daily=2')
        self.assert_in('Would prune: test1', output)
        # must keep the latest non-checkpoint archive:
        self.assert_in('Keeping archive: test2', output)
        output = self.cmd('list', self.repository_location)
        self.assert_in('test1', output)
        self.assert_in('test2', output)
        self.assert_in('test3.checkpoint', output)
        self.assert_in('test3.checkpoint.1', output)
        self.cmd('prune', self.repository_location, '--keep-daily=2')
        output = self.cmd('list', self.repository_location)
        self.assert_not_in('test1', output)
        # the latest non-checkpoint archive must still be there:
        self.assert_in('test2', output)

    def test_prune_repository_save_space(self):
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test1', src_dir)
        self.cmd('create', self.repository_location + '::test2', src_dir)
        output = self.cmd('prune', '-v', '--list', '--dry-run', self.repository_location, '--keep-daily=2')
        self.assert_in('Keeping archive: test2', output)
        self.assert_in('Would prune: test1', output)
        output = self.cmd('list', self.repository_location)
        self.assert_in('test1', output)
        self.assert_in('test2', output)
        self.cmd('prune', '--save-space', self.repository_location, '--keep-daily=2')
        output = self.cmd('list', self.repository_location)
        self.assert_not_in('test1', output)
        self.assert_in('test2', output)

    def test_prune_repository_prefix(self):
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::foo-2015-08-12-10:00', src_dir)
        self.cmd('create', self.repository_location + '::foo-2015-08-12-20:00', src_dir)
        self.cmd('create', self.repository_location + '::bar-2015-08-12-10:00', src_dir)
        self.cmd('create', self.repository_location + '::bar-2015-08-12-20:00', src_dir)
        output = self.cmd('prune', '-v', '--list', '--dry-run', self.repository_location, '--keep-daily=2', '--prefix=foo-')
        self.assert_in('Keeping archive: foo-2015-08-12-20:00', output)
        self.assert_in('Would prune: foo-2015-08-12-10:00', output)
        output = self.cmd('list', self.repository_location)
        self.assert_in('foo-2015-08-12-10:00', output)
        self.assert_in('foo-2015-08-12-20:00', output)
        self.assert_in('bar-2015-08-12-10:00', output)
        self.assert_in('bar-2015-08-12-20:00', output)
        self.cmd('prune', self.repository_location, '--keep-daily=2', '--prefix=foo-')
        output = self.cmd('list', self.repository_location)
        self.assert_not_in('foo-2015-08-12-10:00', output)
        self.assert_in('foo-2015-08-12-20:00', output)
        self.assert_in('bar-2015-08-12-10:00', output)
        self.assert_in('bar-2015-08-12-20:00', output)

    def test_list_prefix(self):
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test-1', src_dir)
        self.cmd('create', self.repository_location + '::something-else-than-test-1', src_dir)
        self.cmd('create', self.repository_location + '::test-2', src_dir)
        output = self.cmd('list', '--prefix=test-', self.repository_location)
        self.assert_in('test-1', output)
        self.assert_in('test-2', output)
        self.assert_not_in('something-else', output)

    def test_list_list_format(self):
        self.cmd('init', self.repository_location)
        test_archive = self.repository_location + '::test'
        self.cmd('create', test_archive, src_dir)
        output_1 = self.cmd('list', test_archive)
        output_2 = self.cmd('list', '--list-format', '{mode} {user:6} {group:6} {size:8d} {isomtime} {path}{extra}{NEWLINE}', test_archive)
        output_3 = self.cmd('list', '--list-format', '{mtime:%s} {path}{NEWLINE}', test_archive)
        self.assertEqual(output_1, output_2)
        self.assertNotEqual(output_1, output_3)

    def test_break_lock(self):
        self.cmd('init', self.repository_location)
        self.cmd('break-lock', self.repository_location)

    def test_usage(self):
        if self.FORK_DEFAULT:
            self.cmd(exit_code=0)
            self.cmd('-h', exit_code=0)
        else:
            self.assert_raises(SystemExit, lambda: self.cmd())
            self.assert_raises(SystemExit, lambda: self.cmd('-h'))

    def test_help(self):
        assert 'Borg' in self.cmd('help')
        assert 'patterns' in self.cmd('help', 'patterns')
        assert 'Initialize' in self.cmd('help', 'init')
        assert 'positional arguments' not in self.cmd('help', 'init', '--epilog-only')
        assert 'This command initializes' not in self.cmd('help', 'init', '--usage-only')


    @unittest.skipUnless(has_llfuse, 'llfuse not installed')
    def test_fuse(self):
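        # Helper: check whether this platform/filesystem actually honours O_NOATIME
        # (opening with it may raise PermissionError if we do not own the file).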
        def has_noatime(some_file):
            atime_before = os.stat(some_file).st_atime_ns
            try:
                os.close(os.open(some_file, flags_noatime))
            except PermissionError:
                return False
            else:
                atime_after = os.stat(some_file).st_atime_ns
                noatime_used = flags_noatime != flags_normal
                return noatime_used and atime_before == atime_after

        self.cmd('init', self.repository_location)
        self.create_test_files()
        have_noatime = has_noatime('input/file1')
        self.cmd('create', self.repository_location + '::archive', 'input')
        self.cmd('create', self.repository_location + '::archive2', 'input')
        if has_lchflags:
            # remove the file we did not backup, so input and mount become equal
            os.remove(os.path.join('input', 'flagfile'))
        mountpoint = os.path.join(self.tmpdir, 'mountpoint')
        # mount the whole repository, archive contents shall show up in archivename subdirs of mountpoint:
        with self.fuse_mount(self.repository_location, mountpoint):
            # bsdflags are not supported by the FUSE mount
            # we also ignore xattrs here, they are tested separately
            self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'archive', 'input'),
                                   ignore_bsdflags=True, ignore_xattrs=True)
            self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'archive2', 'input'),
                                   ignore_bsdflags=True, ignore_xattrs=True)
        # mount only 1 archive, its contents shall show up directly in mountpoint:
        with self.fuse_mount(self.repository_location + '::archive', mountpoint):
            self.assert_dirs_equal(self.input_path, os.path.join(mountpoint, 'input'),
                                   ignore_bsdflags=True, ignore_xattrs=True)
            # regular file
            in_fn = 'input/file1'
            out_fn = os.path.join(mountpoint, 'input', 'file1')
            # stat
            sti1 = os.stat(in_fn)
            sto1 = os.stat(out_fn)
            assert sti1.st_mode == sto1.st_mode
            assert sti1.st_uid == sto1.st_uid
            assert sti1.st_gid == sto1.st_gid
            assert sti1.st_size == sto1.st_size
            if have_noatime:
                assert sti1.st_atime == sto1.st_atime
            assert sti1.st_ctime == sto1.st_ctime
            assert sti1.st_mtime == sto1.st_mtime
            # note: there is another hardlink to this, see below
            assert sti1.st_nlink == sto1.st_nlink == 2
            # read
            with open(in_fn, 'rb') as in_f, open(out_fn, 'rb') as out_f:
                assert in_f.read() == out_f.read()
            # hardlink (to 'input/file1')
            in_fn = 'input/hardlink'
            out_fn = os.path.join(mountpoint, 'input', 'hardlink')
            sti2 = os.stat(in_fn)
            sto2 = os.stat(out_fn)
            assert sti2.st_nlink == sto2.st_nlink == 2
            assert sto1.st_ino == sto2.st_ino
            # symlink
            in_fn = 'input/link1'
            out_fn = os.path.join(mountpoint, 'input', 'link1')
            sti = os.stat(in_fn, follow_symlinks=False)
            sto = os.stat(out_fn, follow_symlinks=False)
            assert stat.S_ISLNK(sti.st_mode)
            assert stat.S_ISLNK(sto.st_mode)
            assert os.readlink(in_fn) == os.readlink(out_fn)
            # FIFO
            out_fn = os.path.join(mountpoint, 'input', 'fifo1')
            sto = os.stat(out_fn)
            assert stat.S_ISFIFO(sto.st_mode)
            # list/read xattrs
            try:
                in_fn = 'input/fusexattr'
                out_fn = os.path.join(mountpoint, 'input', 'fusexattr')
                if not xattr.XATTR_FAKEROOT and xattr.is_enabled(self.input_path):
                    assert no_selinux(xattr.listxattr(out_fn)) == ['user.foo', ]
                    assert xattr.getxattr(out_fn, 'user.foo') == b'bar'
                else:
                    assert xattr.listxattr(out_fn) == []
                    try:
                        xattr.getxattr(out_fn, 'user.foo')
                    except OSError as e:
                        assert e.errno == llfuse.ENOATTR
                    else:
                        assert False, "expected OSError(ENOATTR), but no error was raised"
            except OSError as err:
                if sys.platform.startswith(('freebsd', )) and err.errno == errno.ENOTSUP:
                    # some systems have no xattr support on FUSE
                    pass
                else:
                    raise

    @unittest.skipUnless(has_llfuse, 'llfuse not installed')
    def test_fuse_allow_damaged_files(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('archive')
        # Get rid of a chunk and repair it
        archive, repository = self.open_archive('archive')
        with repository:
            for item in archive.iter_items():
                if item[b'path'].endswith('testsuite/archiver.py'):
                    repository.delete(item[b'chunks'][-1][0])
                    path = item[b'path']  # store full path for later
                    break
            else:
                assert False  # missed the file
            repository.commit()
        self.cmd('check', '--repair', self.repository_location, exit_code=0)
        mountpoint = os.path.join(self.tmpdir, 'mountpoint')
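        # without the allow_damaged_files mount option, reading the damaged file must
        # fail with EIO; with it, the (repaired/placeholder) file can be opened again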
        with self.fuse_mount(self.repository_location + '::archive', mountpoint):
            with pytest.raises(OSError) as excinfo:
                open(os.path.join(mountpoint, path))
            assert excinfo.value.errno == errno.EIO
        with self.fuse_mount(self.repository_location + '::archive', mountpoint, '-o', 'allow_damaged_files'):
            open(os.path.join(mountpoint, path)).close()

    def verify_aes_counter_uniqueness(self, method):
        seen = set()  # Chunks already seen
        used = set()  # counter values already used
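
        # verify_uniqueness re-reads every object in the repository and records the AES-CTR
        # counter range each one used, asserting that no counter value is ever reused.
        # The offsets below assume the encrypted chunk envelope used here: 1 type byte +
        # 32 MAC bytes + 8 nonce bytes (41-byte header), with the nonce at data[33:41].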
        def verify_uniqueness():
            with Repository(self.repository_path) as repository:
                for id, _ in repository.open_index(repository.get_transaction_id()).iteritems():
                    data = repository.get(id)
                    hash = sha256(data).digest()
                    if hash not in seen:
                        seen.add(hash)
                        num_blocks = num_aes_blocks(len(data) - 41)
                        nonce = bytes_to_long(data[33:41])
                        for counter in range(nonce, nonce + num_blocks):
                            self.assert_not_in(counter, used)
                            used.add(counter)

        self.create_test_files()
        os.environ['BORG_PASSPHRASE'] = 'passphrase'
        self.cmd('init', '--encryption=' + method, self.repository_location)
        verify_uniqueness()
        self.cmd('create', self.repository_location + '::test', 'input')
        verify_uniqueness()
        self.cmd('create', self.repository_location + '::test.2', 'input')
        verify_uniqueness()
        self.cmd('delete', self.repository_location + '::test.2')
        verify_uniqueness()
        self.assert_equal(used, set(range(len(used))))

    def test_aes_counter_uniqueness_keyfile(self):
        self.verify_aes_counter_uniqueness('keyfile')

    def test_aes_counter_uniqueness_passphrase(self):
        self.verify_aes_counter_uniqueness('repokey')

    def test_debug_dump_archive_items(self):
        self.create_test_files()
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            output = self.cmd('debug-dump-archive-items', self.repository_location + '::test')
        output_dir = sorted(os.listdir('output'))
        assert len(output_dir) > 0 and output_dir[0].startswith('000000_')
        assert 'Done.' in output

    def test_debug_dump_repo_objs(self):
        self.create_test_files()
        self.cmd('init', self.repository_location)
        self.cmd('create', self.repository_location + '::test', 'input')
        with changedir('output'):
            output = self.cmd('debug-dump-repo-objs', self.repository_location)
        output_dir = sorted(os.listdir('output'))
        assert len(output_dir) > 0 and output_dir[0].startswith('000000_')
        assert 'Done.' in output

    def test_debug_put_get_delete_obj(self):
        self.cmd('init', self.repository_location)
        data = b'some data'
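        # the test repository is created without encryption here, so the object id is
        # simply the SHA-256 of the data and can be predicted locally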
        hexkey = sha256(data).hexdigest()
        self.create_regular_file('file', contents=data)
        output = self.cmd('debug-put-obj', self.repository_location, 'input/file')
        assert hexkey in output
        output = self.cmd('debug-get-obj', self.repository_location, hexkey, 'output/file')
        assert hexkey in output
        with open('output/file', 'rb') as f:
            data_read = f.read()
        assert data == data_read
        output = self.cmd('debug-delete-obj', self.repository_location, hexkey)
        assert "deleted" in output
        output = self.cmd('debug-delete-obj', self.repository_location, hexkey)
        assert "not found" in output
        output = self.cmd('debug-delete-obj', self.repository_location, 'invalid')
        assert "is invalid" in output

    def test_key_export_keyfile(self):
        export_file = self.output_path + '/exported'
        self.cmd('init', self.repository_location, '--encryption', 'keyfile')
        repo_id = self._extract_repository_id(self.repository_path)
        self.cmd('key', 'export', self.repository_location, export_file)
        with open(export_file, 'r') as fd:
            export_contents = fd.read()
        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')
        key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0]
        with open(key_file, 'r') as fd:
            key_contents = fd.read()
        assert key_contents == export_contents
        os.unlink(key_file)
        self.cmd('key', 'import', self.repository_location, export_file)
        with open(key_file, 'r') as fd:
            key_contents2 = fd.read()
        assert key_contents2 == key_contents

    def test_key_export_repokey(self):
        export_file = self.output_path + '/exported'
        self.cmd('init', self.repository_location, '--encryption', 'repokey')
        repo_id = self._extract_repository_id(self.repository_path)
        self.cmd('key', 'export', self.repository_location, export_file)
        with open(export_file, 'r') as fd:
            export_contents = fd.read()
        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')
        with Repository(self.repository_path) as repository:
            repo_key = RepoKey(repository)
            repo_key.load(None, Passphrase.env_passphrase())
        backup_key = KeyfileKey(key.KeyTestCase.MockRepository())
        backup_key.load(export_file, Passphrase.env_passphrase())
        assert repo_key.enc_key == backup_key.enc_key
        with Repository(self.repository_path) as repository:
            repository.save_key(b'')
        self.cmd('key', 'import', self.repository_location, export_file)
        with Repository(self.repository_path) as repository:
            repo_key2 = RepoKey(repository)
            repo_key2.load(None, Passphrase.env_passphrase())
            # the re-imported key must equal the key we had before wiping it
            assert repo_key2.enc_key == repo_key.enc_key

    def test_key_import_errors(self):
        export_file = self.output_path + '/exported'
        self.cmd('init', self.repository_location, '--encryption', 'keyfile')
        self.cmd('key', 'import', self.repository_location, export_file, exit_code=EXIT_ERROR)
        with open(export_file, 'w') as fd:
            fd.write('something not a key\n')
        if self.FORK_DEFAULT:
            self.cmd('key', 'import', self.repository_location, export_file, exit_code=2)
        else:
            self.assert_raises(NotABorgKeyFile, lambda: self.cmd('key', 'import', self.repository_location, export_file))
        with open(export_file, 'w') as fd:
            fd.write('BORG_KEY a0a0a0\n')
        if self.FORK_DEFAULT:
            self.cmd('key', 'import', self.repository_location, export_file, exit_code=2)
        else:
            self.assert_raises(RepoIdMismatch, lambda: self.cmd('key', 'import', self.repository_location, export_file))

    def test_key_export_paperkey(self):
        repo_id = 'e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239'
        export_file = self.output_path + '/exported'
        self.cmd('init', self.repository_location, '--encryption', 'keyfile')
        self._set_repository_id(self.repository_path, unhexlify(repo_id))
        key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0]
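        # overwrite the generated key file with a minimal hand-made one: the BORG_KEY header
        # followed by 21 bytes of base64 payload; the paper export below re-encodes that
        # payload as hex lines with short per-line checksums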
        with open(key_file, 'w') as fd:
            fd.write(KeyfileKey.FILE_ID + ' ' + repo_id + '\n')
            fd.write(b2a_base64(b'abcdefghijklmnopqrstu').decode())
        self.cmd('key', 'export', '--paper', self.repository_location, export_file)
        with open(export_file, 'r') as fd:
            export_contents = fd.read()
        assert export_contents == """To restore key use borg key import --paper /path/to/repo

BORG PAPER KEY v1
id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
 1: 616263 646566 676869 6a6b6c 6d6e6f 707172 - 6d
 2: 737475 - 88
"""


@unittest.skipUnless('binary' in BORG_EXES, 'no borg.exe available')
class ArchiverTestCaseBinary(ArchiverTestCase):
    EXE = 'borg.exe'
    FORK_DEFAULT = True

    @unittest.skip('test_basic_functionality seems incompatible with fakeroot and/or the binary.')
    def test_basic_functionality(self):
        pass

    @unittest.skip('test_overwrite seems incompatible with fakeroot and/or the binary.')
    def test_overwrite(self):
        pass

    def test_fuse(self):
        if fakeroot_detected():
            unittest.skip('test_fuse with the binary is not compatible with fakeroot')
        else:
            super().test_fuse()


class ArchiverCheckTestCase(ArchiverTestCaseBase):

    def setUp(self):
        super().setUp()
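        # shrink the item metadata buffer so the archive item stream is split across
        # many small chunks, giving the consistency checks multi-chunk metadata to work on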
        with patch.object(ChunkBuffer, 'BUFFER_SIZE', 10):
            self.cmd('init', self.repository_location)
            self.create_src_archive('archive1')
            self.create_src_archive('archive2')

    def test_check_usage(self):
        output = self.cmd('check', '-v', self.repository_location, exit_code=0)
        self.assert_in('Starting repository check', output)
        self.assert_in('Starting archive consistency check', output)
        output = self.cmd('check', '-v', '--repository-only', self.repository_location, exit_code=0)
        self.assert_in('Starting repository check', output)
        self.assert_not_in('Starting archive consistency check', output)
        output = self.cmd('check', '-v', '--archives-only', self.repository_location, exit_code=0)
        self.assert_not_in('Starting repository check', output)
        self.assert_in('Starting archive consistency check', output)
        output = self.cmd('check', '-v', '--archives-only', '--prefix=archive2', self.repository_location, exit_code=0)
        self.assert_not_in('archive1', output)

    def test_missing_file_chunk(self):
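        # Scenario: delete one chunk of a known file, let check --repair replace it with a
        # placeholder, then create a fresh archive that contains the original chunk again
        # so a second repair run can fully heal the file in the older archives.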
        archive, repository = self.open_archive('archive1')
        with repository:
            for item in archive.iter_items():
                if item[b'path'].endswith('testsuite/archiver.py'):
                    valid_chunks = item[b'chunks']
                    killed_chunk = valid_chunks[-1]
                    repository.delete(killed_chunk[0])
                    break
            else:
                self.assert_true(False)  # should not happen
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        output = self.cmd('check', '--repair', self.repository_location, exit_code=0)
        self.assert_in('New missing file chunk detected', output)
        self.cmd('check', self.repository_location, exit_code=0)
        # check that the file in the old archives has now a different chunk list without the killed chunk
        for archive_name in ('archive1', 'archive2'):
            archive, repository = self.open_archive(archive_name)
            with repository:
                for item in archive.iter_items():
                    if item[b'path'].endswith('testsuite/archiver.py'):
                        self.assert_not_equal(valid_chunks, item[b'chunks'])
                        self.assert_not_in(killed_chunk, item[b'chunks'])
                        break
                else:
                    self.assert_true(False)  # should not happen
        # do a fresh backup (that will include the killed chunk)
        with patch.object(ChunkBuffer, 'BUFFER_SIZE', 10):
            self.create_src_archive('archive3')
        # check should be able to heal the file now:
        output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
        self.assert_in('Healed previously missing file chunk', output)
        self.assert_in('testsuite/archiver.py: Completely healed previously damaged file!', output)
        # check that the file in the old archives has the correct chunks again
        for archive_name in ('archive1', 'archive2'):
            archive, repository = self.open_archive(archive_name)
            with repository:
                for item in archive.iter_items():
                    if item[b'path'].endswith('testsuite/archiver.py'):
                        self.assert_equal(valid_chunks, item[b'chunks'])
                        break
                else:
                    self.assert_true(False)  # should not happen

    def test_missing_archive_item_chunk(self):
        archive, repository = self.open_archive('archive1')
        with repository:
            repository.delete(archive.metadata[b'items'][-5])
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        self.cmd('check', '--repair', self.repository_location, exit_code=0)
        self.cmd('check', self.repository_location, exit_code=0)

    def test_missing_archive_metadata(self):
        archive, repository = self.open_archive('archive1')
        with repository:
            repository.delete(archive.id)
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        self.cmd('check', '--repair', self.repository_location, exit_code=0)
        self.cmd('check', self.repository_location, exit_code=0)

    def test_missing_manifest(self):
        archive, repository = self.open_archive('archive1')
        with repository:
            repository.delete(Manifest.MANIFEST_ID)
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
        self.assert_in('archive1', output)
        self.assert_in('archive2', output)
        self.cmd('check', self.repository_location, exit_code=0)

    def test_corrupted_manifest(self):
        archive, repository = self.open_archive('archive1')
        with repository:
            manifest = repository.get(Manifest.MANIFEST_ID)
            corrupted_manifest = manifest + b'corrupted!'
            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
        self.assert_in('archive1', output)
        self.assert_in('archive2', output)
        self.cmd('check', self.repository_location, exit_code=0)

    def test_manifest_rebuild_corrupted_chunk(self):
        archive, repository = self.open_archive('archive1')
        with repository:
            manifest = repository.get(Manifest.MANIFEST_ID)
            corrupted_manifest = manifest + b'corrupted!'
            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
            chunk = repository.get(archive.id)
            corrupted_chunk = chunk + b'corrupted!'
            repository.put(archive.id, corrupted_chunk)
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
        self.assert_in('archive2', output)
        self.cmd('check', self.repository_location, exit_code=0)

    def test_manifest_rebuild_duplicate_archive(self):
        archive, repository = self.open_archive('archive1')
        key = archive.key
        with repository:
            manifest = repository.get(Manifest.MANIFEST_ID)
            corrupted_manifest = manifest + b'corrupted!'
            repository.put(Manifest.MANIFEST_ID, corrupted_manifest)
            archive = msgpack.packb({
                'cmdline': [],
                'items': [],
                'hostname': 'foo',
                'username': 'bar',
                'name': 'archive1',
                'time': '2016-12-15T18:49:51.849711',
                'version': 1,
            })
            archive_id = key.id_hash(archive)
            repository.put(archive_id, key.encrypt(archive))
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        self.cmd('check', '--repair', self.repository_location, exit_code=0)
        output = self.cmd('list', self.repository_location)
        self.assert_in('archive1', output)
        self.assert_in('archive1.1', output)
        self.assert_in('archive2', output)

    def test_extra_chunks(self):
        self.cmd('check', self.repository_location, exit_code=0)
        with Repository(self.repository_location, exclusive=True) as repository:
            repository.put(b'01234567890123456789012345678901', b'xxxx')
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)
        self.cmd('check', self.repository_location, exit_code=1)
        self.cmd('check', '--repair', self.repository_location, exit_code=0)
        self.cmd('check', self.repository_location, exit_code=0)
        self.cmd('extract', '--dry-run', self.repository_location + '::archive1', exit_code=0)

    def test_empty_repository(self):
        with Repository(self.repository_location, exclusive=True) as repository:
            for id_ in repository.list():
                repository.delete(id_)
            repository.commit()
        self.cmd('check', self.repository_location, exit_code=1)

    def test_attic013_acl_bug(self):
        # Attic up to release 0.13 contained a bug where every item unintentionally received
        # a b'acl'=None key-value pair.
        # This bug can still live on in Borg repositories (through borg upgrade).
        archive, repository = self.open_archive('archive1')
        with repository:
            manifest, key = Manifest.load(repository)
            with Cache(repository, key, manifest) as cache:
                archive = Archive(repository, key, manifest, '0.13', cache=cache, create=True)
                archive.items_buffer.add({
                    # path and mtime are required.
                    b'path': '1234',
                    b'mtime': 0,
                    # acl is the offending key.
                    b'acl': None
                })
                archive.save()
        self.cmd('check', self.repository_location, exit_code=0)


class ManifestAuthenticationTest(ArchiverTestCaseBase):

    def spoof_manifest(self, repository):
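        # overwrite the manifest with one that carries no TAM authentication tag,
        # as a pre-1.0.9 borg client (or an attacker with repository access) could do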
        with repository:
            _, key = Manifest.load(repository)
            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                'version': 1,
                'archives': {},
                'config': {},
                'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
            })))
            repository.commit()

    def test_fresh_init_tam_required(self):
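        # a freshly initialized repository requires TAM-authenticated manifests, so
        # replacing the manifest with an unauthenticated one must be rejected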
        self.cmd('init', self.repository_location)
        repository = Repository(self.repository_path, exclusive=True)
        with repository:
            manifest, key = Manifest.load(repository)
            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                'version': 1,
                'archives': {},
                'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
            })))
            repository.commit()
        with pytest.raises(TAMRequiredError):
            self.cmd('list', self.repository_location)

    def test_not_required(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('archive1234')
        repository = Repository(self.repository_path, exclusive=True)
        with repository:
            shutil.rmtree(get_security_dir(bin_to_hex(repository.id)))
            _, key = Manifest.load(repository)
            key.tam_required = False
            key.change_passphrase(key._passphrase)
            manifest = msgpack.unpackb(key.decrypt(None, repository.get(Manifest.MANIFEST_ID)))
            del manifest[b'tam']
            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb(manifest)))
            repository.commit()
        output = self.cmd('list', '--debug', self.repository_location)
        assert 'archive1234' in output
        assert 'TAM not found and not required' in output
        # Run upgrade
        self.cmd('upgrade', '--tam', self.repository_location)
        # Manifest must be authenticated now
        output = self.cmd('list', '--debug', self.repository_location)
        assert 'archive1234' in output
        assert 'TAM-verified manifest' in output
        # Try to spoof / modify pre-1.0.9
        self.spoof_manifest(repository)
        # Fails
        with pytest.raises(TAMRequiredError):
            self.cmd('list', self.repository_location)
        # Force upgrade
        self.cmd('upgrade', '--tam', '--force', self.repository_location)
        self.cmd('list', self.repository_location)

    def test_disable(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('archive1234')
        self.cmd('upgrade', '--disable-tam', self.repository_location)
        repository = Repository(self.repository_path, exclusive=True)
        self.spoof_manifest(repository)
        assert not self.cmd('list', self.repository_location)

    def test_disable2(self):
        self.cmd('init', self.repository_location)
        self.create_src_archive('archive1234')
        repository = Repository(self.repository_path, exclusive=True)
        self.spoof_manifest(repository)
        self.cmd('upgrade', '--disable-tam', self.repository_location)
        assert not self.cmd('list', self.repository_location)


@pytest.mark.skipif(sys.platform == 'cygwin', reason='remote is broken on cygwin and hangs')
class RemoteArchiverTestCase(ArchiverTestCase):
    prefix = '__testsuite__:'

    def test_remote_repo_restrict_to_path(self):
        # restricted to repo directory itself:
        with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', self.repository_path]):
            self.cmd('init', self.repository_location)
        # restricted to repo directory itself, fail for other directories with same prefix:
        with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', self.repository_path]):
            self.assert_raises(PathNotAllowed, lambda: self.cmd('init', self.repository_location + '_0'))
        # restricted to a completely different path:
        with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', '/foo']):
            self.assert_raises(PathNotAllowed, lambda: self.cmd('init', self.repository_location + '_1'))
        path_prefix = os.path.dirname(self.repository_path)
        # restrict to repo directory's parent directory:
        with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', path_prefix]):
            self.cmd('init', self.repository_location + '_2')
        # restrict to repo directory's parent directory and another directory:
        with patch.object(RemoteRepository, 'extra_test_args', ['--restrict-to-path', '/foo', '--restrict-to-path', path_prefix]):
            self.cmd('init', self.repository_location + '_3')

    @unittest.skip('only works locally')
    def test_debug_put_get_delete_obj(self):
        pass

    def test_strip_components_doesnt_leak(self):
        self.cmd('init', self.repository_location)
        self.create_regular_file('dir/file', contents=b"test file contents 1")
        self.create_regular_file('dir/file2', contents=b"test file contents 2")
        self.create_regular_file('skipped-file1', contents=b"test file contents 3")
        self.create_regular_file('skipped-file2', contents=b"test file contents 4")
        self.create_regular_file('skipped-file3', contents=b"test file contents 5")
        self.cmd('create', self.repository_location + '::test', 'input')
        marker = 'cached responses left in RemoteRepository'
        with changedir('output'):
            res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '3')
            self.assert_true(marker not in res)
            with self.assert_creates_file('file'):
                res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '2')
                self.assert_true(marker not in res)
            with self.assert_creates_file('dir/file'):
                res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '1')
                self.assert_true(marker not in res)
            with self.assert_creates_file('input/dir/file'):
                res = self.cmd('extract', "--debug", self.repository_location + '::test', '--strip-components', '0')
                self.assert_true(marker not in res)


def test_get_args():
    archiver = Archiver()
    # everything normal:
    # first param is argv as produced by ssh forced command,
    # second param is like from SSH_ORIGINAL_COMMAND env variable
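    # e.g. (illustrative only) an authorized_keys entry such as
    #   command="borg serve --restrict-to-path=/p1 --restrict-to-path=/p2" ssh-rsa AAAA...
    # produces the first param, while whatever the client typed arrives as the second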
    args = archiver.get_args(['borg', 'serve', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ],
                             'borg serve --info --umask=0027')
    assert args.func == archiver.do_serve
    assert args.restrict_to_paths == ['/p1', '/p2']
    assert args.umask == 0o027
    assert args.log_level == 'info'
    # trying to cheat - break out of path restriction
    args = archiver.get_args(['borg', 'serve', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ],
                             'borg serve --restrict-to-path=/')
    assert args.restrict_to_paths == ['/p1', '/p2']
    # trying to cheat - try to execute different subcommand
    args = archiver.get_args(['borg', 'serve', '--restrict-to-path=/p1', '--restrict-to-path=/p2', ],
                             'borg init /')
    assert args.func == archiver.do_serve


class TestBuildFilter:
    def test_basic(self):
        matcher = PatternMatcher()
        matcher.add([parse_pattern('included')], True)
        filter = Archiver.build_filter(matcher)
        assert filter({b'path': 'included'})
        assert filter({b'path': 'included/file'})
        assert not filter({b'path': 'something else'})

    def test_empty(self):
        matcher = PatternMatcher(fallback=True)
        filter = Archiver.build_filter(matcher)
        assert filter({b'path': 'anything'})

    def test_strip_components(self):
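        # with strip_components=1, items whose path has only one component would be
        # stripped to nothing, so the filter must drop them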
        matcher = PatternMatcher(fallback=True)
        filter = Archiver.build_filter(matcher, strip_components=1)
        assert not filter({b'path': 'shallow'})
        assert not filter({b'path': 'shallow/'})  # can this even happen? paths are normalized...
        assert filter({b'path': 'deep enough/file'})
        assert filter({b'path': 'something/dir/file'})