@@ -25,7 +25,7 @@ from .compress import Compressor, CompressionSpec
 from .constants import *  # NOQA
 from .hashindex import ChunkIndex, ChunkIndexEntry
 from .helpers import Manifest
-from .helpers import Chunk, ChunkIteratorFileWrapper, open_item
+from .helpers import ChunkIteratorFileWrapper, open_item
 from .helpers import Error, IntegrityError, set_ec
 from .helpers import uid2user, user2uid, gid2group, group2gid
 from .helpers import parse_timestamp, to_localtime
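
The thread running through this diff: the Chunk wrapper from borg.helpers is dropped, so chunk payloads travel as plain bytes. Consequently key.decrypt() returns the plaintext directly instead of a (metadata, data) pair, and cache.add_chunk() takes raw bytes. A minimal sketch of the calling-convention change, using stand-in stubs rather than borg's real key/cache objects:

    from collections import namedtuple

    Chunk = namedtuple('Chunk', 'data')   # the wrapper this change removes

    def decrypt_old(cdata):
        return None, cdata                # (meta, data) pair; callers unpack

    def decrypt_new(cdata):
        return cdata                      # plain bytes; nothing to unpack

    _, data = decrypt_old(b'payload')     # before: _, data = key.decrypt(...)
    data = decrypt_new(b'payload')        # after:  data = key.decrypt(...)
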
@@ -195,7 +195,7 @@ class DownloadPipeline:
         otherwise preloaded chunks will accumulate in RemoteRepository and create a memory leak.
         """
         unpacker = msgpack.Unpacker(use_list=False)
-        for _, data in self.fetch_many(ids):
+        for data in self.fetch_many(ids):
             unpacker.feed(data)
             items = [Item(internal_dict=item) for item in unpacker]
             for item in items:
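
For reference, the streaming-unpack pattern DownloadPipeline relies on: raw msgpack bytes are fed into an Unpacker incrementally, and complete objects can be iterated as soon as they are fully buffered. A small self-contained sketch (the data is produced locally here; in borg it arrives decrypted from fetch_many):

    import msgpack

    unpacker = msgpack.Unpacker(use_list=False)
    stream = [msgpack.packb({'path': '/etc/hosts'}),
              msgpack.packb({'path': '/etc/fstab'})]
    for data in stream:            # stands in for: for data in self.fetch_many(ids)
        unpacker.feed(data)
        for item in unpacker:      # yields each fully buffered object
            print(item)
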
@@ -237,7 +237,9 @@ class ChunkBuffer:
         if self.buffer.tell() == 0:
             return
         self.buffer.seek(0)
-        chunks = list(Chunk(bytes(s)) for s in self.chunker.chunkify(self.buffer))
+        # The chunker returns a memoryview to its internal buffer,
+        # thus a copy is needed before resuming the chunker iterator.
+        chunks = list(bytes(s) for s in self.chunker.chunkify(self.buffer))
         self.buffer.seek(0)
         self.buffer.truncate(0)
         # Leave the last partial chunk in the buffer unless flush is True
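
The new comment is worth illustrating: a chunker-style generator may hand out memoryviews into a single internal buffer that it rewrites on each step, so collecting the views without copying yields clobbered data. A toy generator showing the hazard (borg's C chunker differs in detail but has the same contract):

    def toy_chunker(blocks):
        buf = bytearray(4)
        for block in blocks:
            buf[:len(block)] = block
            yield memoryview(buf)[:len(block)]   # view into the shared buffer

    blocks = [b'aaaa', b'bbbb']
    stale = list(toy_chunker(blocks))            # views all alias one buffer
    print([bytes(v) for v in stale])             # [b'bbbb', b'bbbb'] - clobbered
    copied = [bytes(v) for v in toy_chunker(blocks)]  # copy before resuming
    print(copied)                                # [b'aaaa', b'bbbb']
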
@@ -245,7 +247,7 @@ class ChunkBuffer:
         for chunk in chunks[:end]:
             self.chunks.append(self.write_chunk(chunk))
         if end == -1:
-            self.buffer.write(chunks[-1].data)
+            self.buffer.write(chunks[-1])
 
     def is_full(self):
         return self.buffer.tell() > self.BUFFER_SIZE
@@ -259,7 +261,7 @@ class CacheChunkBuffer(ChunkBuffer):
         self.stats = stats
 
     def write_chunk(self, chunk):
-        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk.data), chunk, self.stats, wait=False)
+        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats, wait=False)
         self.cache.repository.async_response(wait=False)
         return id_
 
@@ -325,7 +327,7 @@ class Archive:
             self.zeros = None
 
     def _load_meta(self, id):
-        _, data = self.key.decrypt(id, self.repository.get(id))
+        data = self.key.decrypt(id, self.repository.get(id))
         metadata = ArchiveItem(internal_dict=msgpack.unpackb(data, unicode_errors='surrogateescape'))
         if metadata.version != 1:
             raise Exception('Unknown archive metadata version')
@@ -464,7 +466,7 @@ Utilization of max. archive size: {csize_max:.0%}
         metadata = ArchiveItem(metadata)
         data = self.key.pack_and_authenticate_metadata(metadata.as_dict(), context=b'archive')
         self.id = self.key.id_hash(data)
-        self.cache.add_chunk(self.id, Chunk(data), self.stats)
+        self.cache.add_chunk(self.id, data, self.stats)
         while self.repository.async_response(wait=True) is not None:
             pass
         self.manifest.archives[name] = (self.id, metadata.time)
@@ -490,7 +492,7 @@ Utilization of max. archive size: {csize_max:.0%}
         add(self.id)
         for id, chunk in zip(self.metadata.items, self.repository.get_many(self.metadata.items)):
             add(id)
-            _, data = self.key.decrypt(id, chunk)
+            data = self.key.decrypt(id, chunk)
             unpacker.feed(data)
             for item in unpacker:
                 chunks = item.get(b'chunks')
@@ -520,7 +522,7 @@ Utilization of max. archive size: {csize_max:.0%}
         if dry_run or stdout:
             if 'chunks' in item:
                 item_chunks_size = 0
-                for _, data in self.pipeline.fetch_many([c.id for c in item.chunks], is_preloaded=True):
+                for data in self.pipeline.fetch_many([c.id for c in item.chunks], is_preloaded=True):
                     if pi:
                         pi.show(increase=len(data), info=[remove_surrogates(item.path)])
                     if stdout:
@@ -584,7 +586,7 @@ Utilization of max. archive size: {csize_max:.0%}
                 self.zeros = b'\0' * (1 << self.chunker_params[1])
             with fd:
                 ids = [c.id for c in item.chunks]
-                for _, data in self.pipeline.fetch_many(ids, is_preloaded=True):
+                for data in self.pipeline.fetch_many(ids, is_preloaded=True):
                     if pi:
                         pi.show(increase=len(data), info=[remove_surrogates(item.path)])
                     with backup_io('write'):
@@ -712,7 +714,7 @@ Utilization of max. archive size: {csize_max:.0%}
         setattr(metadata, key, value)
         data = msgpack.packb(metadata.as_dict(), unicode_errors='surrogateescape')
         new_id = self.key.id_hash(data)
-        self.cache.add_chunk(new_id, Chunk(data), self.stats)
+        self.cache.add_chunk(new_id, data, self.stats)
         self.manifest.archives[self.name] = (new_id, metadata.time)
         self.cache.chunk_decref(self.id, self.stats)
         self.id = new_id
@@ -759,7 +761,7 @@ Utilization of max. archive size: {csize_max:.0%}
             for (i, (items_id, data)) in enumerate(zip(items_ids, self.repository.get_many(items_ids))):
                 if progress:
                     pi.show(i)
-                _, data = self.key.decrypt(items_id, data)
+                data = self.key.decrypt(items_id, data)
                 unpacker.feed(data)
                 chunk_decref(items_id, stats)
                 try:
@@ -874,10 +876,10 @@ Utilization of max. archive size: {csize_max:.0%}
         self.write_checkpoint()
         return length, number
 
-    def chunk_file(self, item, cache, stats, chunk_iter, chunk_processor=None, **chunk_kw):
+    def chunk_file(self, item, cache, stats, chunk_iter, chunk_processor=None):
         if not chunk_processor:
             def chunk_processor(data):
-                chunk_entry = cache.add_chunk(self.key.id_hash(data), Chunk(data, **chunk_kw), stats, wait=False)
+                chunk_entry = cache.add_chunk(self.key.id_hash(data), data, stats, wait=False)
                 self.cache.repository.async_response(wait=False)
                 return chunk_entry
 
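
The default chunk_processor above is the deduplicating write path: the chunk id is a hash/MAC of the plain data, so identical chunks collapse onto one stored object plus a reference-count bump. A simplified sketch of that behaviour (borg's real cache also tracks sizes, compression, and async repository writes):

    import hashlib

    store, refcount = {}, {}

    def add_chunk(id_, data):
        if id_ in store:
            refcount[id_] += 1      # seen before: no new write
        else:
            store[id_] = data       # first occurrence: write it
            refcount[id_] = 1
        return id_

    for data in [b'spam', b'eggs', b'spam']:
        add_chunk(hashlib.sha256(data).digest(), data)
    print(len(store))               # 2 stored chunks; b'spam' counted twice
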
@@ -1205,9 +1207,9 @@ class ArchiveChecker:
                         chunk_ids = list(reversed(chunk_ids_revd))
                         chunk_data_iter = self.repository.get_many(chunk_ids)
                 else:
+                    _chunk_id = None if chunk_id == Manifest.MANIFEST_ID else chunk_id
                     try:
-                        _chunk_id = None if chunk_id == Manifest.MANIFEST_ID else chunk_id
-                        _, data = self.key.decrypt(_chunk_id, encrypted_data)
+                        self.key.decrypt(_chunk_id, encrypted_data)
                     except IntegrityError as integrity_error:
                         self.error_found = True
                         errors += 1
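
In this hunk decrypt() is called purely for its side effect: it authenticates the chunk, raising IntegrityError on mismatch, and the plaintext is discarded. A toy equivalent of verify-by-decrypting built on an HMAC (borg's key classes implement this differently per encryption mode):

    import hmac, hashlib

    KEY = b'k' * 32

    def decrypt(chunk_id, blob):
        tag, data = blob[:32], blob[32:]
        if not hmac.compare_digest(tag, hmac.new(KEY, data, hashlib.sha256).digest()):
            raise ValueError('chunk fails authentication')   # borg: IntegrityError
        return data

    data = b'payload'
    blob = hmac.new(KEY, data, hashlib.sha256).digest() + data
    decrypt(None, blob)   # called for the check; the returned plaintext is unused
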
@@ -1277,7 +1279,7 @@ class ArchiveChecker:
         for chunk_id, _ in self.chunks.iteritems():
             cdata = self.repository.get(chunk_id)
             try:
-                _, data = self.key.decrypt(chunk_id, cdata)
+                data = self.key.decrypt(chunk_id, cdata)
             except IntegrityError as exc:
                 logger.error('Skipping corrupted chunk: %s', exc)
                 self.error_found = True
@@ -1322,9 +1324,9 @@ class ArchiveChecker:
                 self.possibly_superseded.add(id_)
 
         def add_callback(chunk):
-            id_ = self.key.id_hash(chunk.data)
+            id_ = self.key.id_hash(chunk)
             cdata = self.key.encrypt(chunk)
-            add_reference(id_, len(chunk.data), len(cdata), cdata)
+            add_reference(id_, len(chunk), len(cdata), cdata)
             return id_
 
         def add_reference(id_, size, csize, cdata=None):
@@ -1345,7 +1347,7 @@ class ArchiveChecker:
             def replacement_chunk(size):
                 data = bytes(size)
                 chunk_id = self.key.id_hash(data)
-                cdata = self.key.encrypt(Chunk(data))
+                cdata = self.key.encrypt(data)
                 csize = len(cdata)
                 return chunk_id, size, csize, cdata
 
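
replacement_chunk() is the repair strategy for lost data: substitute an all-zero chunk of the same size, so a restored file keeps its length and the surviving data stays at the correct offsets. Because the id is content-derived, all replacement chunks of a given size also deduplicate to a single stored object. A sketch with sha256 standing in for key.id_hash:

    import hashlib

    def replacement_chunk(size):
        data = bytes(size)                        # b'\x00' * size
        chunk_id = hashlib.sha256(data).digest()  # stands in for key.id_hash
        return chunk_id, data

    id_a, _ = replacement_chunk(4096)
    id_b, _ = replacement_chunk(4096)
    print(id_a == id_b)   # True: equal-size replacements share one chunk
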
@@ -1454,7 +1456,7 @@ class ArchiveChecker:
                 if state > 0:
                     unpacker.resync()
                 for chunk_id, cdata in zip(items, repository.get_many(items)):
-                    _, data = self.key.decrypt(chunk_id, cdata)
+                    data = self.key.decrypt(chunk_id, cdata)
                     unpacker.feed(data)
                     try:
                         for item in unpacker:
@@ -1504,7 +1506,7 @@ class ArchiveChecker:
                     continue
                 mark_as_possibly_superseded(archive_id)
                 cdata = self.repository.get(archive_id)
-                _, data = self.key.decrypt(archive_id, cdata)
+                data = self.key.decrypt(archive_id, cdata)
                 archive = ArchiveItem(internal_dict=msgpack.unpackb(data))
                 if archive.version != 1:
                     raise Exception('Unknown archive metadata version')
@@ -1521,7 +1523,7 @@ class ArchiveChecker:
                 archive.items = items_buffer.chunks
                 data = msgpack.packb(archive.as_dict(), unicode_errors='surrogateescape')
                 new_archive_id = self.key.id_hash(data)
-                cdata = self.key.encrypt(Chunk(data))
+                cdata = self.key.encrypt(data)
                 add_reference(new_archive_id, len(data), len(cdata), cdata)
                 self.manifest.archives[info.name] = (new_archive_id, info.ts)
 
@@ -1655,11 +1657,10 @@ class ArchiveRecreater:
         if self.recompress and not self.always_recompress and chunk_id in self.cache.chunks:
             # Check if this chunk is already compressed the way we want it
             old_chunk = self.key.decrypt(None, self.repository.get(chunk_id), decompress=False)
-            if Compressor.detect(old_chunk.data).name == self.key.compressor.decide(data).name:
+            if Compressor.detect(old_chunk).name == self.key.compressor.decide(data).name:
                 # Stored chunk has the same compression we wanted
                 overwrite = False
-        chunk = Chunk(data)
-        chunk_entry = self.cache.add_chunk(chunk_id, chunk, target.stats, overwrite=overwrite, wait=False)
+        chunk_entry = self.cache.add_chunk(chunk_id, data, target.stats, overwrite=overwrite, wait=False)
         self.cache.repository.async_response(wait=False)
         self.seen_chunks.add(chunk_entry.id)
         return chunk_entry
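
The fast path above avoids pointless recompression: fetch the stored chunk while it is still compressed (decompress=False), detect which algorithm produced it, and only overwrite when it differs from the target. A toy detector keyed on a magic byte (borg's Compressor.detect reads its own per-chunk compression headers):

    import zlib

    def detect(cdata):
        # zlib streams commonly start with 0x78; everything else: 'none'
        return 'zlib' if cdata[:1] == b'\x78' else 'none'

    stored = zlib.compress(b'some chunk data')
    overwrite = detect(stored) != 'zlib'
    print(detect(stored), overwrite)   # zlib False -> keep the stored chunk
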
@@ -1673,7 +1674,7 @@ class ArchiveRecreater:
             yield from target.chunker.chunkify(file)
         else:
             for chunk in chunk_iterator:
-                yield chunk.data
+                yield chunk
 
     def save(self, archive, target, comment=None, replace_original=True):
         if self.dry_run: