
Merge pull request #3900 from ThomasWaldmann/update-fuse-code

use unpacker.tell() instead of deprecated write_bytes, fixes #3899
TW, 7 years ago
parent commit 86a91af67a
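
For context on the API change: msgpack-python's Unpacker.tell() reports how many stream bytes have been consumed by fully unpacked objects, which is exactly what the removed write_bytes callback tracked by hand. A minimal standalone sketch of the tell() semantics the new code relies on (illustration only, not borg code; assumes a msgpack version that provides tell() and the no-argument unpack()):

import msgpack

unpacker = msgpack.Unpacker()
payload = msgpack.packb({"n": 1}) + msgpack.packb({"n": 2})

# feed all but the last few bytes, so the second object is incomplete
unpacker.feed(payload[:-3])

item = unpacker.unpack()                    # first object unpacks fine
assert unpacker.tell() == len(msgpack.packb({"n": 1}))

try:
    unpacker.unpack()                       # second object is still partial
except msgpack.OutOfData:
    # tell() does not move for partially consumed objects, which is why
    # the new code must special-case the need_more_data path
    assert unpacker.tell() == len(msgpack.packb({"n": 1}))

unpacker.feed(payload[-3:])                 # feed the rest
item = unpacker.unpack()
assert unpacker.tell() == len(payload)
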
1 changed file with 13 additions and 11 deletions

src/borg/fuse.py (+13, -11)

@@ -126,7 +126,7 @@ class ItemCache:
         stream_offset = 0
         # Offset of the current chunk in the metadata stream
         chunk_begin = 0
-        # Length of the chunk preciding the current chunk
+        # Length of the chunk preceding the current chunk
         last_chunk_length = 0
         msgpacked_bytes = b''
 
@@ -134,13 +134,6 @@ class ItemCache:
         meta = self.meta
         pack_indirect_into = self.indirect_entry_struct.pack_into
 
-        def write_bytes(append_msgpacked_bytes):
-            # XXX: Future versions of msgpack include an Unpacker.tell() method that provides this for free.
-            nonlocal msgpacked_bytes
-            nonlocal stream_offset
-            msgpacked_bytes += append_msgpacked_bytes
-            stream_offset += len(append_msgpacked_bytes)
-
         for key, (csize, data) in zip(archive_item_ids, self.decrypted_repository.get_many(archive_item_ids)):
             # Store the chunk ID in the meta-array
             if write_offset + 32 >= len(meta):
@@ -149,16 +142,25 @@ class ItemCache:
             current_id_offset = write_offset
             write_offset += 32
 
-            # The chunk boundaries cannot be tracked through write_bytes, because the unpack state machine
-            # *can* and *will* consume partial items, so calls to write_bytes are unrelated to chunk boundaries.
             chunk_begin += last_chunk_length
             last_chunk_length = len(data)
 
             unpacker.feed(data)
             while True:
                 try:
-                    item = unpacker.unpack(write_bytes)
+                    item = unpacker.unpack()
+                    need_more_data = False
                 except msgpack.OutOfData:
+                    need_more_data = True
+
+                start = stream_offset - chunk_begin
+                # tell() is not helpful for the need_more_data case, but we know it is the remainder
+                # of the data in that case. in the other case, tell() works as expected.
+                length = (len(data) - start) if need_more_data else (unpacker.tell() - stream_offset)
+                msgpacked_bytes += data[start:start+length]
+                stream_offset += length
+
+                if need_more_data:
                     # Need more data, feed the next chunk
                     break
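
Putting the new hunk together: per decrypted chunk, the loop now derives each item's byte range from unpacker.tell() instead of a callback. A simplified, self-contained sketch of that bookkeeping (toy items and chunking as hypothetical stand-ins for borg's metadata stream; assumes msgpack >= 1.0; the meta-array writing that follows in the real file is omitted):

import msgpack

# pack a few items, then split the stream at arbitrary points so that
# items span chunk boundaries, as they do in borg's metadata stream
items = [{"path": "a"}, {"path": "b" * 40}, {"path": "c"}]
stream = b"".join(msgpack.packb(item) for item in items)
chunks = [stream[i:i + 16] for i in range(0, len(stream), 16)]

unpacker = msgpack.Unpacker()
stream_offset = 0      # bytes of the stream fully accounted for so far
chunk_begin = 0        # offset of the current chunk within the stream
last_chunk_length = 0
msgpacked_bytes = b""  # raw packed bytes of the item being assembled
unpacked = []

for data in chunks:
    chunk_begin += last_chunk_length
    last_chunk_length = len(data)
    unpacker.feed(data)
    while True:
        try:
            item = unpacker.unpack()
            need_more_data = False
        except msgpack.OutOfData:
            need_more_data = True

        start = stream_offset - chunk_begin
        # on OutOfData the rest of this chunk belongs to the unfinished
        # item; otherwise tell() marks the exact end of the unpacked item
        length = (len(data) - start) if need_more_data else (unpacker.tell() - stream_offset)
        msgpacked_bytes += data[start:start + length]
        stream_offset += length

        if need_more_data:
            break  # feed the next chunk

        # msgpacked_bytes is now exactly the packed form of `item`
        assert msgpack.unpackb(msgpacked_bytes) == item
        unpacked.append(item)
        msgpacked_bytes = b""

assert unpacked == items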