mirror of https://github.com/borgbackup/borg.git
synced 2025-02-22 22:22:27 +00:00

Fix incorrect propagation of OSErrors in create code

This commit is contained in:
parent 79c59bffa8
commit 5b453856ec

2 changed files with 43 additions and 8 deletions
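The pattern this commit introduces: OSErrors raised while reading input files are wrapped in a dedicated InputOSError type, so the create code can catch exactly those, print a per-file warning, and continue, while an OSError from any other layer (for example the repository) keeps propagating and aborts the run. A minimal, self-contained sketch of the idea; the backup_file helper and the example path are illustrative, not borg code:

    from contextlib import contextmanager


    class InputOSError(Exception):
        """Wrapper for an OSError raised while accessing an input file."""
        def __init__(self, os_error):
            self.os_error = os_error

        def __str__(self):
            return str(self.os_error)


    @contextmanager
    def input_io():
        """Turn an OSError raised inside the block into an InputOSError."""
        try:
            yield
        except OSError as os_error:
            raise InputOSError(os_error) from os_error


    def backup_file(path):
        # Hypothetical helper: only access to the *input* file is wrapped;
        # an OSError raised anywhere else would propagate unchanged.
        with input_io():
            with open(path, 'rb') as fd:
                return fd.read()


    try:
        backup_file('/no/such/file')  # illustrative path
    except InputOSError as e:
        print('skipping unreadable input file:', e)  # warn and carry on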
borg/archive.py

@@ -1,4 +1,5 @@
 from binascii import hexlify
+from contextlib import contextmanager
 from datetime import datetime, timezone
 from getpass import getuser
 from itertools import groupby
@@ -45,6 +46,37 @@
 flags_noatime = flags_normal | getattr(os, 'O_NOATIME', 0)


+class InputOSError(Exception):
+    """Wrapper for OSError raised while accessing input files."""
+    def __init__(self, os_error):
+        self.os_error = os_error
+        self.errno = os_error.errno
+        self.strerror = os_error.strerror
+        self.filename = os_error.filename
+
+    def __str__(self):
+        return str(self.os_error)
+
+
+@contextmanager
+def input_io():
+    """Context manager changing OSError to InputOSError."""
+    try:
+        yield
+    except OSError as os_error:
+        raise InputOSError(os_error) from os_error
+
+
+def input_io_iter(iterator):
+    while True:
+        try:
+            with input_io():
+                item = next(iterator)
+        except StopIteration:
+            return
+        yield item
+
+
 class DownloadPipeline:

     def __init__(self, repository, key):
@@ -464,12 +496,14 @@ def stat_attrs(self, st, path):
         }
         if self.numeric_owner:
             item[b'user'] = item[b'group'] = None
-        xattrs = xattr.get_all(path, follow_symlinks=False)
+        with input_io():
+            xattrs = xattr.get_all(path, follow_symlinks=False)
         if xattrs:
             item[b'xattrs'] = StableDict(xattrs)
         if has_lchflags and st.st_flags:
             item[b'bsdflags'] = st.st_flags
-        acl_get(path, item, st, self.numeric_owner)
+        with input_io():
+            acl_get(path, item, st, self.numeric_owner)
         return item

     def process_dir(self, path, st):
@@ -504,7 +538,7 @@ def process_stdin(self, path, cache):
         uid, gid = 0, 0
         fd = sys.stdin.buffer  # binary
         chunks = []
-        for chunk in self.chunker.chunkify(fd):
+        for chunk in input_io_iter(self.chunker.chunkify(fd)):
             chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
         self.stats.nfiles += 1
         t = int_to_bigint(int(time.time()) * 1000000000)
@@ -552,10 +586,11 @@ def process_file(self, path, st, cache, ignore_inode=False):
         item = {b'path': safe_path}
         # Only chunkify the file if needed
         if chunks is None:
-            fh = Archive._open_rb(path)
+            with input_io():
+                fh = Archive._open_rb(path)
             with os.fdopen(fh, 'rb') as fd:
                 chunks = []
-                for chunk in self.chunker.chunkify(fd, fh):
+                for chunk in input_io_iter(self.chunker.chunkify(fd, fh)):
                     chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
                     if self.show_progress:
                         self.stats.show_progress(item=item, dt=0.2)
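A note on input_io_iter in the two chunkify loops above: the wrapper applies input_io() to each next() call on the chunk iterator only, not to the whole loop body, so an OSError from reading the input file becomes an InputOSError while an OSError raised by cache.add_chunk (the repository side) propagates unchanged. A small sketch of the behaviour, assuming the input_io_iter/InputOSError definitions above; the failing generator is made up for illustration:

    def flaky_chunks():
        # stand-in for chunker.chunkify() on a file that fails mid-read
        yield b'chunk-1'
        raise OSError('simulated read error')

    try:
        for chunk in input_io_iter(flaky_chunks()):
            print('stored', chunk)  # errors raised here are NOT wrapped
    except InputOSError as e:
        print('input file failed while chunking:', e)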
borg/archiver.py
@@ -29,7 +29,7 @@
 from .repository import Repository
 from .cache import Cache
 from .key import key_creator, RepoKey, PassphraseKey
-from .archive import Archive, ArchiveChecker, CHUNKER_PARAMS
+from .archive import input_io, InputOSError, Archive, ArchiveChecker, CHUNKER_PARAMS
 from .remote import RepositoryServer, RemoteRepository, cache_if_remote

 has_lchflags = hasattr(os, 'lchflags')
@@ -198,7 +198,7 @@ def create_inner(archive, cache):
             if not dry_run:
                 try:
                     status = archive.process_stdin(path, cache)
-                except OSError as e:
+                except InputOSError as e:
                     status = 'E'
                     self.print_warning('%s: %s', path, e)
             else:
@@ -273,7 +273,7 @@ def _process(self, archive, cache, matcher, exclude_caches, exclude_if_present,
             if not dry_run:
                 try:
                     status = archive.process_file(path, st, cache, self.ignore_inode)
-                except OSError as e:
+                except InputOSError as e:
                     status = 'E'
                     self.print_warning('%s: %s', path, e)
             elif stat.S_ISDIR(st.st_mode):
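The archiver-side change above is the actual fix: by narrowing except OSError to except InputOSError, only failures reading the input file are downgraded to a per-file 'E' status and a warning; an OSError escaping from any other layer (for example the repository) now aborts the run instead of being mistaken for an unreadable source file. A schematic call-site sketch; process_one and the plain print stand in for borg's Archiver methods and are illustrative only:

    def process_one(path, process_file):
        try:
            status = process_file(path)
        except InputOSError as e:  # was: except OSError as e
            # only input-file errors are reported and skipped
            status = 'E'
            print('%s: %s' % (path, e))
        return status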