@@ -29,12 +29,11 @@ from .helpers import Error, IntegrityError
 from .helpers import uid2user, user2uid, gid2group, group2gid
 from .helpers import parse_timestamp, to_localtime
 from .helpers import format_time, format_timedelta, format_file_size, file_status
-from .helpers import safe_encode, safe_decode, make_path_safe, remove_surrogates, swidth_slice
-from .helpers import decode_dict, StableDict
-from .helpers import int_to_bigint, bigint_to_int, bin_to_hex
+from .helpers import safe_encode, safe_decode, make_path_safe, remove_surrogates
+from .helpers import StableDict
+from .helpers import bin_to_hex
 from .helpers import ellipsis_truncate, ProgressIndicatorPercent, log_multi
 from .helpers import PathPrefixPattern, FnmatchPattern
-from .helpers import consume, chunkit
 from .helpers import CompressionDecider1, CompressionDecider2, CompressionSpec
 from .item import Item, ArchiveItem
 from .key import key_factory
@@ -125,19 +124,22 @@ class BackupOSError(Exception):
         return str(self.os_error)
 
 
-@contextmanager
-def backup_io():
-    """Context manager changing OSError to BackupOSError."""
-    try:
-        yield
-    except OSError as os_error:
-        raise BackupOSError(os_error) from os_error
+class BackupIO:
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type and issubclass(exc_type, OSError):
+            raise BackupOSError(exc_val) from exc_val
+
+
+backup_io = BackupIO()
 
 
 def backup_io_iter(iterator):
     while True:
         try:
-            with backup_io():
+            with backup_io:
                 item = next(iterator)
         except StopIteration:
             return
@@ -475,13 +477,13 @@ Number of files: {0.stats.nfiles}'''.format(
             pass
         mode = item.mode
         if stat.S_ISREG(mode):
-            with backup_io():
+            with backup_io:
                 if not os.path.exists(os.path.dirname(path)):
                     os.makedirs(os.path.dirname(path))
             # Hard link?
             if 'source' in item:
                 source = os.path.join(dest, *item.source.split(os.sep)[stripped_components:])
-                with backup_io():
+                with backup_io:
                     if os.path.exists(path):
                         os.unlink(path)
                 if item.source not in hardlink_masters:
@@ -490,24 +492,24 @@ Number of files: {0.stats.nfiles}'''.format(
                 item.chunks, link_target = hardlink_masters[item.source]
                 if link_target:
                     # Hard link was extracted previously, just link
-                    with backup_io():
+                    with backup_io:
                         os.link(link_target, path)
                     return
                 # Extract chunks, since the item which had the chunks was not extracted
-            with backup_io():
+            with backup_io:
                 fd = open(path, 'wb')
             with fd:
                 ids = [c.id for c in item.chunks]
                 for _, data in self.pipeline.fetch_many(ids, is_preloaded=True):
                     if pi:
                         pi.show(increase=len(data), info=[remove_surrogates(item.path)])
-                    with backup_io():
+                    with backup_io:
                         if sparse and self.zeros.startswith(data):
                             # all-zero chunk: create a hole in a sparse file
                             fd.seek(len(data), 1)
                         else:
                             fd.write(data)
-                with backup_io():
+                with backup_io:
                     pos = fd.tell()
                     fd.truncate(pos)
                     fd.flush()
@@ -519,7 +521,7 @@ Number of files: {0.stats.nfiles}'''.format(
                 # Update master entry with extracted file path, so that following hardlinks don't extract twice.
                 hardlink_masters[item.get('source') or original_path] = (None, path)
             return
-        with backup_io():
+        with backup_io:
             # No repository access beyond this point.
             if stat.S_ISDIR(mode):
                 if not os.path.exists(path):
@@ -705,7 +707,7 @@ Number of files: {0.stats.nfiles}'''.format(
 
     def stat_ext_attrs(self, st, path):
         attrs = {}
-        with backup_io():
+        with backup_io:
             xattrs = xattr.get_all(path, follow_symlinks=False)
             bsdflags = get_flags(path, st)
             acl_get(path, attrs, st, self.numeric_owner)
@@ -742,7 +744,7 @@ Number of files: {0.stats.nfiles}'''.format(
             return 'b'  # block device
 
     def process_symlink(self, path, st):
-        with backup_io():
+        with backup_io:
             source = os.readlink(path)
         item = Item(path=make_path_safe(path), source=source)
         item.update(self.stat_attrs(st, path))
@@ -854,7 +856,7 @@ Number of files: {0.stats.nfiles}'''.format(
         else:
             compress = self.compression_decider1.decide(path)
             self.file_compression_logger.debug('%s -> compression %s', path, compress['name'])
-            with backup_io():
+            with backup_io:
                 fh = Archive._open_rb(path)
             with os.fdopen(fh, 'rb') as fd:
                 self.chunk_file(item, cache, self.stats, backup_io_iter(self.chunker.chunkify(fd, fh)), compress=compress)