
repo: do not put objects that we won't get, fixes #1451

we will not get() objects that have a segment entry larger than MAX_OBJECT_SIZE.
thus we should never produce such entries.

also: introduce repository.MAX_DATA_SIZE that gives the max payload size.
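The caller-visible effect, mirroring the new test further below; a minimal sketch assuming a local filesystem repository (the path and key values are made up for illustration):

    from borg.helpers import IntegrityError
    from borg.repository import Repository, MAX_DATA_SIZE

    # hypothetical local repo path; any writable location works
    with Repository('/tmp/demo-repo', create=True) as repository:
        key = b'0' * 32                                # repository ids are 32 bytes
        repository.put(key, b'x' * MAX_DATA_SIZE)      # largest payload that is still accepted
        try:
            repository.put(b'1' * 32, b'x' * (MAX_DATA_SIZE + 1))
        except IntegrityError:
            pass  # rejected at write time instead of creating an entry that get() would refuse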
Thomas Waldmann, 8 years ago
commit a360307938
3 files changed with 24 additions and 2 deletions
  1. borg/repository.py  +8 −1
  2. borg/testsuite/repository.py  +8 −1
  3. docs/changes.rst  +8 −0

borg/repository.py  +8 −1

@@ -731,8 +731,12 @@ class LoggedIO:
         return size, tag, key, data

     def write_put(self, id, data, raise_full=False):
+        data_size = len(data)
+        if data_size > MAX_DATA_SIZE:
+            # this would push the segment entry size beyond MAX_OBJECT_SIZE.
+            raise IntegrityError('More than allowed put data [{} > {}]'.format(data_size, MAX_DATA_SIZE))
         fd = self.get_write_fd(raise_full=raise_full)
-        size = len(data) + self.put_header_fmt.size
+        size = data_size + self.put_header_fmt.size
         offset = self.offset
         header = self.header_no_crc_fmt.pack(size, TAG_PUT)
         crc = self.crc_fmt.pack(crc32(data, crc32(id, crc32(header))) & 0xffffffff)
@@ -771,3 +775,6 @@ class LoggedIO:
             self._write_fd.close()
             sync_dir(os.path.dirname(self._write_fd.name))
             self._write_fd = None
+
+
+MAX_DATA_SIZE = MAX_OBJECT_SIZE - LoggedIO.put_header_fmt.size
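For reference, the new constant encodes the fact that a PUT segment entry is the payload plus a fixed header (size, CRC, tag and the 32-byte key), so the largest payload that still fits under MAX_OBJECT_SIZE is MAX_OBJECT_SIZE minus that header size. A small sketch of the arithmetic, assuming the 1.0.x values (20 MiB MAX_OBJECT_SIZE, '<IIB32s' put header):

    import struct

    MAX_OBJECT_SIZE = 20 * 1024 * 1024          # assumed value: 20 MiB
    put_header_fmt = struct.Struct('<IIB32s')   # size (4) + crc (4) + tag (1) + key (32) = 41 bytes
    MAX_DATA_SIZE = MAX_OBJECT_SIZE - put_header_fmt.size

    print(MAX_DATA_SIZE)                        # 20971479 with the values assumed above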

borg/testsuite/repository.py  +8 −1

@@ -8,7 +8,7 @@ from ..hashindex import NSIndex
 from ..helpers import Location, IntegrityError
 from ..locking import Lock, LockFailed
 from ..remote import RemoteRepository, InvalidRPCMethod
-from ..repository import Repository, LoggedIO, TAG_COMMIT
+from ..repository import Repository, LoggedIO, TAG_COMMIT, MAX_DATA_SIZE
 from . import BaseTestCase


@@ -128,6 +128,13 @@ class RepositoryTestCase(RepositoryTestCaseBase):
         self.assert_equal(second_half, all[50:])
         self.assert_equal(len(self.repository.list(limit=50)), 50)

+    def test_max_data_size(self):
+        max_data = b'x' * MAX_DATA_SIZE
+        self.repository.put(b'00000000000000000000000000000000', max_data)
+        self.assert_equal(self.repository.get(b'00000000000000000000000000000000'), max_data)
+        self.assert_raises(IntegrityError,
+                           lambda: self.repository.put(b'00000000000000000000000000000001', max_data + b'x'))
+


 class RepositoryCommitTestCase(RepositoryTestCaseBase):

docs/changes.rst  +8 −0

@@ -57,6 +57,14 @@ Security fixes:

 - fix security issue with remote repository access, #1428

+Bug fixes:
+
+- do not write objects to repository that are bigger than the allowed size,
+  borg will reject reading them, #1451.
+  IMPORTANT: if you created archives with many millions of files or
+             directories, please verify if you can open them successfully,
+             e.g. try a "borg list REPO::ARCHIVE".
+

 Version 1.0.7rc1 (2016-08-05)
 -----------------------------