
Merge branch 'master' into compression

Thomas Waldmann, 9 years ago
commit bf757738f7
7 files changed, 94 insertions(+), 23 deletions(-)
  1. CHANGES.rst (+24 -0)
  2. borg/_hashindex.c (+10 -5)
  3. borg/archiver.py (+8 -3)
  4. borg/remote.py (+2 -2)
  5. borg/repository.py (+6 -2)
  6. borg/testsuite/archiver.py (+37 -4)
  7. docs/installation.rst (+7 -7)

+ 24 - 0
CHANGES.rst

@@ -34,6 +34,30 @@ New features:
 - create --compression lzma,N (slowest, highest compression, default N is 6)
 
 
+Version 0.25.0 (not released yet)
+---------------------------------
+
+Incompatible changes (compared to 0.24):
+
+- none yet
+
+Deprecations:
+
+- none yet
+
+New features:
+
+- honor the nodump flag (UF_NODUMP) and do not backup such items
+
+Bug fixes:
+
+- close fds of segments we delete (during compaction)
+
+Other changes:
+
+- none yet
+
+
 Version 0.24.0
 --------------
 

+ 10 - 5
borg/_hashindex.c

@@ -145,10 +145,12 @@ hashindex_read(const char *path)
     bytes_read = fread(&header, 1, sizeof(HashHeader), fd);
     if(bytes_read != sizeof(HashHeader)) {
         if(ferror(fd)) {
-            EPRINTF_PATH(path, "fread header failed (expected %ld, got %ld)", sizeof(HashHeader), bytes_read);
+            EPRINTF_PATH(path, "fread header failed (expected %ju, got %ju)",
+                         (uintmax_t) sizeof(HashHeader), (uintmax_t) bytes_read);
         }
         else {
-            EPRINTF_MSG_PATH(path, "fread header failed (expected %ld, got %ld)", sizeof(HashHeader), bytes_read);
+            EPRINTF_MSG_PATH(path, "fread header failed (expected %ju, got %ju)",
+                             (uintmax_t) sizeof(HashHeader), (uintmax_t) bytes_read);
         }
         goto fail;
     }
@@ -170,7 +172,8 @@ hashindex_read(const char *path)
     }
     buckets_length = (off_t)_le32toh(header.num_buckets) * (header.key_size + header.value_size);
     if(length != sizeof(HashHeader) + buckets_length) {
-        EPRINTF_MSG_PATH(path, "Incorrect file length (expected %ld, got %ld)", sizeof(HashHeader) + buckets_length, length);
+        EPRINTF_MSG_PATH(path, "Incorrect file length (expected %ju, got %ju)",
+                         (uintmax_t) sizeof(HashHeader) + buckets_length, (uintmax_t) length);
         goto fail;
     }
     if(!(index = malloc(sizeof(HashIndex)))) {
@@ -186,10 +189,12 @@ hashindex_read(const char *path)
     bytes_read = fread(index->buckets, 1, buckets_length, fd);
     if(bytes_read != buckets_length) {
         if(ferror(fd)) {
-            EPRINTF_PATH(path, "fread buckets failed (expected %ld, got %ld)", buckets_length, bytes_read);
+            EPRINTF_PATH(path, "fread buckets failed (expected %ju, got %ju)",
+                         (uintmax_t) buckets_length, (uintmax_t) bytes_read);
         }
         else {
-            EPRINTF_MSG_PATH(path, "fread buckets failed (expected %ld, got %ld)", buckets_length, bytes_read);
+            EPRINTF_MSG_PATH(path, "fread buckets failed (expected %ju, got %ju)",
+                             (uintmax_t) buckets_length, (uintmax_t) bytes_read);
         }
         free(index->buckets);
         free(index);
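
The hunks above only swap %ld for %ju and add uintmax_t casts: size_t and off_t are not guaranteed to be long, so casting to uintmax_t and printing with %ju is the portable way to report the expected/actual byte counts. For context, here is a hedged Python sketch of the same header-and-length validation hashindex_read performs (the struct layout is an assumption mirroring HashHeader, not borg's actual code):

    import os
    import struct

    # assumed layout mirroring HashHeader: 8-byte magic, int32 num_entries,
    # int32 num_buckets, int8 key_size, int8 value_size (little-endian, packed)
    HEADER_FMT = '<8siibb'
    HEADER_SIZE = struct.calcsize(HEADER_FMT)

    def check_index_file(path):
        """Sketch only: validate that the file length matches its header."""
        with open(path, 'rb') as fd:
            header = fd.read(HEADER_SIZE)
            if len(header) != HEADER_SIZE:
                raise ValueError('header read failed (expected %d, got %d)'
                                 % (HEADER_SIZE, len(header)))
            magic, num_entries, num_buckets, key_size, value_size = struct.unpack(HEADER_FMT, header)
            buckets_length = num_buckets * (key_size + value_size)
            length = os.fstat(fd.fileno()).st_size
            if length != HEADER_SIZE + buckets_length:
                raise ValueError('incorrect file length (expected %d, got %d)'
                                 % (HEADER_SIZE + buckets_length, length))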

+ 8 - 3
borg/archiver.py

@@ -25,6 +25,8 @@ from .helpers import Error, location_validator, format_time, format_file_size, \
     is_cachedir, bigint_to_int, ChunkerParams, CompressionSpec
 from .remote import RepositoryServer, RemoteRepository
 
+has_lchflags = hasattr(os, 'lchflags')
+
 
 class Archiver:
 
@@ -175,6 +177,9 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         if restrict_dev and st.st_dev != restrict_dev:
             return
         status = None
+        # Ignore if nodump flag is set
+        if has_lchflags and (st.st_flags & stat.UF_NODUMP):
+            return
         if stat.S_ISREG(st.st_mode):
             try:
                 status = archive.process_file(path, st, cache)
@@ -867,7 +872,7 @@ Type "Yes I am sure" if you understand this and want to continue.\n""")
         return args.func(args)
 
 
-def sig_info_handler(signum, stack):
+def sig_info_handler(signum, stack):  # pragma: no cover
     """search the stack for infos about the currently processed file and print them"""
     for frame in inspect.getouterframes(stack):
         func, loc = frame[3], frame[0].f_locals
@@ -890,7 +895,7 @@ def sig_info_handler(signum, stack):
             break
 
 
-def setup_signal_handlers():
+def setup_signal_handlers():  # pragma: no cover
     sigs = []
     if hasattr(signal, 'SIGUSR1'):
         sigs.append(signal.SIGUSR1)  # kill -USR1 pid
@@ -900,7 +905,7 @@ def setup_signal_handlers():
         signal.signal(sig, sig_info_handler)
 
 
-def main():
+def main():  # pragma: no cover
     # Make sure stdout and stderr have errors='replace') to avoid unicode
     # issues when print()-ing unicode file names
     sys.stdout = io.TextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
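
The substantive change in this file is the UF_NODUMP check: items whose stat flags include the BSD "nodump" bit are skipped during create, matching the new CHANGES entry. A minimal standalone sketch of the same test (skip_nodump is an illustrative helper, not borg's API):

    import os
    import stat

    has_lchflags = hasattr(os, 'lchflags')  # st_flags is only populated on BSD/macOS-like platforms

    def skip_nodump(path):
        """Return True if the item carries the UF_NODUMP ("do not back up") flag."""
        st = os.lstat(path)
        return has_lchflags and bool(st.st_flags & stat.UF_NODUMP)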

+ 2 - 2
borg/remote.py

@@ -28,7 +28,7 @@ class InvalidRPCMethod(Error):
     """RPC method is not valid"""
     """RPC method is not valid"""
 
 
 
 
-class RepositoryServer:
+class RepositoryServer:  # pragma: no cover
     rpc_methods = (
     rpc_methods = (
         '__len__',
         '__len__',
         'check',
         'check',
@@ -129,7 +129,7 @@ class RemoteRepository:
         umask = ['--umask', '%03o' % self.umask]
         if location.host == '__testsuite__':
             args = [sys.executable, '-m', 'borg.archiver', 'serve'] + umask + self.extra_test_args
-        else:
+        else:  # pragma: no cover
             args = ['ssh']
             if location.port:
                 args += ['-p', str(location.port)]
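
Both hunks in this file only add coverage pragmas. As a reminder of what the marker does (assuming coverage.py's default exclusion pattern, which matches "pragma: no cover"): when it sits on a def/class line, the whole block is dropped from the coverage report, which is why server-side and ssh-spawning code the test suite never executes is annotated this way. Illustrative example:

    def spawn_ssh(host):  # pragma: no cover
        # never executed by the test suite, so exclude it from coverage
        return ['ssh', host]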

+ 6 - 2
borg/repository.py

@@ -334,7 +334,6 @@ class Repository:
             report_error('Adding commit tag to segment {}'.format(transaction_id))
             self.io.segment = transaction_id + 1
             self.io.write_commit()
-            self.io.close_segment()
         if current_index and not repair:
             if len(current_index) != len(self.index):
                 report_error('Index object count mismatch. {} != {}'.format(len(current_index), len(self.index)))
@@ -517,6 +516,9 @@ class LoggedIO:
             return fd
 
     def delete_segment(self, segment):
+        fd = self.fds.pop(segment)
+        if fd is not None:
+            fd.close()
         try:
             os.unlink(self.segment_filename(segment))
         except OSError:
@@ -559,7 +561,9 @@ class LoggedIO:
             header = fd.read(self.header_fmt.size)
 
     def recover_segment(self, segment, filename):
-        self.fds.pop(segment).close()
+        fd = self.fds.pop(segment)
+        if fd is not None:
+            fd.close()
         # FIXME: save a copy of the original file
         with open(filename, 'rb') as fd:
             data = memoryview(fd.read())
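
These two hunks implement the CHANGES entry "close fds of segments we delete (during compaction)": any cached file object for the segment is closed before the file is unlinked or rewritten, so the deleted segment does not keep an fd (and its disk space) alive until garbage collection. A hedged standalone sketch of the pattern (SegmentStore and its attributes are illustrative, not LoggedIO's real interface):

    import os

    class SegmentStore:
        """Illustrative only: caches open segment files by segment id."""

        def __init__(self):
            self.fds = {}  # segment id -> open file object

        def delete_segment(self, segment, filename):
            fd = self.fds.pop(segment, None)
            if fd is not None:
                fd.close()  # release the fd before unlinking the file
            try:
                os.unlink(filename)
            except OSError:
                pass  # already gone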

+ 37 - 4
borg/testsuite/archiver.py

@@ -183,14 +183,19 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.create_test_files()
         self.cmd('init', self.repository_location)
         self.cmd('create', self.repository_location + '::test', 'input')
-        self.cmd('create', self.repository_location + '::test.2', 'input')
+        self.cmd('create', '--stats', self.repository_location + '::test.2', 'input')
         with changedir('output'):
             self.cmd('extract', self.repository_location + '::test')
         self.assert_equal(len(self.cmd('list', self.repository_location).splitlines()), 2)
-        self.assert_equal(len(self.cmd('list', self.repository_location + '::test').splitlines()), 11)
+        item_count = 10 if has_lchflags else 11  # one file is UF_NODUMP
+        self.assert_equal(len(self.cmd('list', self.repository_location + '::test').splitlines()), item_count)
+        if has_lchflags:
+            # remove the file we did not backup, so input and output become equal
+            os.remove(os.path.join('input', 'flagfile'))
         self.assert_dirs_equal('input', 'output/input')
         info_output = self.cmd('info', self.repository_location + '::test')
-        self.assert_in('Number of files: 4', info_output)
+        item_count = 3 if has_lchflags else 4  # one file is UF_NODUMP
+        self.assert_in('Number of files: %d' % item_count, info_output)
         shutil.rmtree(self.cache_path)
         with environment_variable(BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK='1'):
             info_output2 = self.cmd('info', self.repository_location + '::test')
@@ -403,7 +408,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
         self.cmd('delete', self.repository_location + '::test')
         self.cmd('extract', '--dry-run', self.repository_location + '::test.2')
-        self.cmd('delete', self.repository_location + '::test.2')
+        self.cmd('delete', '--stats', self.repository_location + '::test.2')
         # Make sure all data except the manifest has been deleted
         repository = Repository(self.repository_path)
         self.assert_equal(len(repository), 1)
@@ -470,10 +475,38 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.assert_not_in('test1', output)
         self.assert_in('test2', output)
 
+    def test_prune_repository_prefix(self):
+        self.cmd('init', self.repository_location)
+        self.cmd('create', self.repository_location + '::foo-2015-08-12-10:00', src_dir)
+        self.cmd('create', self.repository_location + '::foo-2015-08-12-20:00', src_dir)
+        self.cmd('create', self.repository_location + '::bar-2015-08-12-10:00', src_dir)
+        self.cmd('create', self.repository_location + '::bar-2015-08-12-20:00', src_dir)
+        output = self.cmd('prune', '-v', '--dry-run', self.repository_location, '--keep-daily=2', '--prefix=foo-')
+        self.assert_in('Keeping archive: foo-2015-08-12-20:00', output)
+        self.assert_in('Would prune:     foo-2015-08-12-10:00', output)
+        output = self.cmd('list', self.repository_location)
+        self.assert_in('foo-2015-08-12-10:00', output)
+        self.assert_in('foo-2015-08-12-20:00', output)
+        self.assert_in('bar-2015-08-12-10:00', output)
+        self.assert_in('bar-2015-08-12-20:00', output)
+        self.cmd('prune', self.repository_location, '--keep-daily=2', '--prefix=foo-')
+        output = self.cmd('list', self.repository_location)
+        self.assert_not_in('foo-2015-08-12-10:00', output)
+        self.assert_in('foo-2015-08-12-20:00', output)
+        self.assert_in('bar-2015-08-12-10:00', output)
+        self.assert_in('bar-2015-08-12-20:00', output)
+
     def test_usage(self):
         self.assert_raises(SystemExit, lambda: self.cmd())
         self.assert_raises(SystemExit, lambda: self.cmd('-h'))
 
+    def test_help(self):
+        assert 'Borg' in self.cmd('help')
+        assert 'patterns' in self.cmd('help', 'patterns')
+        assert 'Initialize' in self.cmd('help', 'init')
+        assert 'positional arguments' not in self.cmd('help', 'init', '--epilog-only')
+        assert 'This command initializes' not in self.cmd('help', 'init', '--usage-only')
+
     @unittest.skipUnless(has_llfuse, 'llfuse not installed')
     def test_fuse_mount_repository(self):
         mountpoint = os.path.join(self.tmpdir, 'mountpoint')
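
test_prune_repository_prefix above exercises the new --prefix option: only archives whose names start with the given prefix take part in pruning, and all other archives are left untouched. A hedged sketch of that selection logic (not borg's actual prune implementation; the keep-daily rule is simplified to "keep the newest archive of each of the N most recent days that have one"):

    def prune_candidates(archives, prefix, keep_daily):
        """archives: list of (name, datetime) pairs; return names that would be pruned."""
        # only archives matching the prefix are considered at all
        matching = sorted((a for a in archives if a[0].startswith(prefix)),
                          key=lambda a: a[1], reverse=True)
        kept, seen_days = set(), set()
        for name, ts in matching:
            day = ts.date()
            if day not in seen_days and len(seen_days) < keep_daily:
                seen_days.add(day)
                kept.add(name)
        return [name for name, _ in matching if name not in kept]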

+ 7 - 7
docs/installation.rst

@@ -66,13 +66,11 @@ Some of the steps detailled below might be useful also for non-git installs.
     # if you do not have gcc / make / etc. yet
     apt-get install build-essential
 
-    # optional: lowlevel FUSE py binding - to mount backup archives
+    # optional: FUSE support - to mount backup archives
     # in case you get complaints about permission denied on /etc/fuse.conf:
     # on ubuntu this means your user is not in the "fuse" group. just add
     # yourself there, log out and log in again.
-    # if it complains about not being able to find llfuse: make a symlink
-    # borg-env/lib/python3.4/site-packages/llfuse -> /usr/lib/python3/dist-packages/llfuse
-    apt-get install python3-llfuse fuse
+    apt-get install libfuse-dev fuse
 
     # optional: for unit testing
     apt-get install fakeroot
@@ -88,6 +86,7 @@ Some of the steps detailled below might be useful also for non-git installs.
     pip install cython  # compile .pyx -> .c
     pip install tox pytest  # optional, for running unit tests
     pip install sphinx  # optional, to build the docs
+    pip install llfuse  # optional, for FUSE support
     cd borg
     pip install -e .  # in-place editable mode
 
@@ -115,9 +114,9 @@ Some of the steps detailled below might be useful also for non-git installs.
     # lz4 super fast compression support Headers + Library
     sudo dnf install lz4
 
-    # optional: lowlevel FUSE py binding - to mount backup archives
-    sudo dnf install python3-llfuse fuse
-
+    # optional: FUSE support - to mount backup archives
+    sudo dnf install fuse-devel fuse
+    
     # optional: for unit testing
     sudo dnf install fakeroot
 
@@ -132,6 +131,7 @@ Some of the steps detailled below might be useful also for non-git installs.
     pip install cython  # compile .pyx -> .c
     pip install tox pytest  # optional, for running unit tests
     pip install sphinx  # optional, to build the docs
+    pip install llfuse  # optional, for FUSE support
     cd borg
     pip install -e .  # in-place editable mode