@@ -566,24 +566,25 @@ Utilization of max. archive size: {csize_max:.0%}
         if stat.S_ISREG(mode):
             with backup_io('makedirs'):
                 make_parent(path)
+            hardlink_set = False
             # Hard link?
             if 'source' in item:
                 source = os.path.join(dest, *item.source.split(os.sep)[stripped_components:])
-                with backup_io('link'):
-                    if item.source not in hardlink_masters:
-                        os.link(source, path)
-                        return
-                item.chunks, link_target = hardlink_masters[item.source]
+                chunks, link_target = hardlink_masters.get(item.source, (None, source))
                 if link_target:
                     # Hard link was extracted previously, just link
-                    with backup_io:
+                    with backup_io('link'):
                         os.link(link_target, path)
-                    return
-                # Extract chunks, since the item which had the chunks was not extracted
-            with backup_io('open'):
-                fd = open(path, 'wb')
+                    hardlink_set = True
+                elif chunks is not None:
+                    # assign chunks to this item, since the item which had the chunks was not extracted
+                    item.chunks = chunks
+            if hardlink_set:
+                return
             if sparse and self.zeros is None:
                 self.zeros = b'\0' * (1 << self.chunker_params[1])
+            with backup_io('open'):
+                fd = open(path, 'wb')
             with fd:
                 ids = [c.id for c in item.chunks]
                 for data in self.pipeline.fetch_many(ids, is_preloaded=True):
@@ -595,7 +596,7 @@ Utilization of max. archive size: {csize_max:.0%}
                             fd.seek(len(data), 1)
                         else:
                             fd.write(data)
-                with backup_io('truncate'):
+                with backup_io('truncate_and_attrs'):
                     pos = item_chunks_size = fd.tell()
                     fd.truncate(pos)
                     fd.flush()
@@ -608,7 +609,7 @@ Utilization of max. archive size: {csize_max:.0%}
             if has_damaged_chunks:
                 logger.warning('File %s has damaged (all-zero) chunks. Try running borg check --repair.' %
                                remove_surrogates(item.path))
-            if hardlink_masters:
+            if not hardlink_set and hardlink_masters:  # 2nd term, is it correct/needed?
                 # Update master entry with extracted file path, so that following hardlinks don't extract twice.
                 hardlink_masters[item.get('source') or original_path] = (None, path)
             return
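
Note (editor's sketch, not part of the patch): the condensed Python below illustrates the control flow this diff establishes in Archive.extract_item for regular files. The names extract_regular_file, masters and write_chunks are illustrative stand-ins, backup_io/error handling is omitted, and item is treated as a plain dict; only the hardlink_masters.get(...) lookup and the hardlink_set flag mirror the patch directly.

    import os

    def extract_regular_file(item, path, masters, write_chunks):
        # 'masters' maps a hardlink master's path to (chunks, link_target);
        # 'write_chunks' stands in for the fetch-and-write loop over item['chunks'].
        hardlink_set = False
        if 'source' in item:
            source = item['source']
            # One dict lookup replaces the old membership test plus indexing:
            # an unknown master falls back to (None, source), i.e. "just link to source".
            chunks, link_target = masters.get(source, (None, source))
            if link_target:
                # The link target already exists on disk, so a hard link is enough.
                os.link(link_target, path)
                hardlink_set = True
            elif chunks is not None:
                # The master was not extracted; this item inherits its chunks and
                # is written out as a normal file below.
                item['chunks'] = chunks
        if hardlink_set:
            return
        write_chunks(item, path)
        if masters:
            # After the early return above, hardlink_set can only be False here,
            # which is what the "2nd term, is it correct/needed?" comment asks about.
            masters[item.get('source', path)] = (None, path)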