Fix incorrect propagation of OSErrors in create code

Marian Beermann 2016-06-27 20:56:41 +02:00
parent 79c59bffa8
commit 5b453856ec
2 changed files with 43 additions and 8 deletions

archive.py

@@ -1,4 +1,5 @@
 from binascii import hexlify
+from contextlib import contextmanager
 from datetime import datetime, timezone
 from getpass import getuser
 from itertools import groupby
@@ -45,6 +46,37 @@ flags_normal = os.O_RDONLY | getattr(os, 'O_BINARY', 0)
 flags_noatime = flags_normal | getattr(os, 'O_NOATIME', 0)
+
+
+class InputOSError(Exception):
+    """Wrapper for OSError raised while accessing input files."""
+    def __init__(self, os_error):
+        self.os_error = os_error
+        self.errno = os_error.errno
+        self.strerror = os_error.strerror
+        self.filename = os_error.filename
+
+    def __str__(self):
+        return str(self.os_error)
+
+
+@contextmanager
+def input_io():
+    """Context manager changing OSError to InputOSError."""
+    try:
+        yield
+    except OSError as os_error:
+        raise InputOSError(os_error) from os_error
+
+
+def input_io_iter(iterator):
+    while True:
+        try:
+            with input_io():
+                item = next(iterator)
+        except StopIteration:
+            return
+        yield item
 
 
 class DownloadPipeline:
 
     def __init__(self, repository, key):
@@ -464,11 +496,13 @@ Number of files: {0.stats.nfiles}'''.format(
         }
         if self.numeric_owner:
             item[b'user'] = item[b'group'] = None
-        xattrs = xattr.get_all(path, follow_symlinks=False)
+        with input_io():
+            xattrs = xattr.get_all(path, follow_symlinks=False)
         if xattrs:
             item[b'xattrs'] = StableDict(xattrs)
         if has_lchflags and st.st_flags:
             item[b'bsdflags'] = st.st_flags
-        acl_get(path, item, st, self.numeric_owner)
+        with input_io():
+            acl_get(path, item, st, self.numeric_owner)
         return item
@@ -504,7 +538,7 @@ Number of files: {0.stats.nfiles}'''.format(
         uid, gid = 0, 0
         fd = sys.stdin.buffer  # binary
         chunks = []
-        for chunk in self.chunker.chunkify(fd):
+        for chunk in input_io_iter(self.chunker.chunkify(fd)):
             chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
         self.stats.nfiles += 1
         t = int_to_bigint(int(time.time()) * 1000000000)
@@ -552,10 +586,11 @@ Number of files: {0.stats.nfiles}'''.format(
         item = {b'path': safe_path}
         # Only chunkify the file if needed
         if chunks is None:
-            fh = Archive._open_rb(path)
+            with input_io():
+                fh = Archive._open_rb(path)
             with os.fdopen(fh, 'rb') as fd:
                 chunks = []
-                for chunk in self.chunker.chunkify(fd, fh):
+                for chunk in input_io_iter(self.chunker.chunkify(fd, fh)):
                     chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
                     if self.show_progress:
                         self.stats.show_progress(item=item, dt=0.2)
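
The helpers added to archive.py above funnel every OSError raised while reading input files through a single wrapper type, so callers can tell input-side failures apart from any other OSError. Below is a minimal standalone sketch of that pattern; the helpers are re-declared here only for illustration, and the failing generator merely stands in for the chunker (neither is part of the commit):

from contextlib import contextmanager

class InputOSError(Exception):
    """Wrapper for OSError raised while accessing input files."""
    def __init__(self, os_error):
        self.os_error = os_error

    def __str__(self):
        return str(self.os_error)

@contextmanager
def input_io():
    """Turn an OSError raised inside the block into InputOSError."""
    try:
        yield
    except OSError as os_error:
        raise InputOSError(os_error) from os_error

def input_io_iter(iterator):
    """Wrap next() so OSErrors raised by the iterator become InputOSError."""
    while True:
        try:
            with input_io():
                item = next(iterator)
        except StopIteration:
            return
        yield item

def failing_chunks():
    # Stand-in for the chunker: yields one chunk, then hits a read error.
    yield b'first chunk'
    raise OSError(5, 'Input/output error')

try:
    for chunk in input_io_iter(failing_chunks()):
        print('got chunk of %d bytes' % len(chunk))
except InputOSError as e:
    print('E %s' % e)  # input-side failure, reported per file

StopIteration still ends the loop normally; only OSErrors coming out of next() are wrapped.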

archiver.py

@@ -29,7 +29,7 @@ from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
 from .repository import Repository
 from .cache import Cache
 from .key import key_creator, RepoKey, PassphraseKey
-from .archive import Archive, ArchiveChecker, CHUNKER_PARAMS
+from .archive import input_io, InputOSError, Archive, ArchiveChecker, CHUNKER_PARAMS
 from .remote import RepositoryServer, RemoteRepository, cache_if_remote
 
 has_lchflags = hasattr(os, 'lchflags')
@@ -198,7 +198,7 @@ class Archiver:
                 if not dry_run:
                     try:
                         status = archive.process_stdin(path, cache)
-                    except OSError as e:
+                    except InputOSError as e:
                         status = 'E'
                         self.print_warning('%s: %s', path, e)
                 else:
@@ -273,7 +273,7 @@
             if not dry_run:
                 try:
                     status = archive.process_file(path, st, cache, self.ignore_inode)
-                except OSError as e:
+                except InputOSError as e:
                     status = 'E'
                     self.print_warning('%s: %s', path, e)
         elif stat.S_ISDIR(st.st_mode):
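
The narrowed except clauses in archiver.py are the point of the commit: only errors that originate from reading the input files are turned into a per-file 'E' status, while OSErrors raised elsewhere during create (for instance while storing chunks) are no longer swallowed and now propagate. A rough sketch of that control flow, reusing the helpers from the sketch above; FailingCache is a hypothetical stand-in, not borg's Cache API:

class FailingCache:
    """Hypothetical cache stand-in that simulates a repository-side failure."""
    def add_chunk(self, id, chunk, stats):
        raise OSError(28, 'No space left on device')

def process_one_file(cache, chunks):
    # Mirrors the create loop: reading input is wrapped, storing chunks is not.
    for chunk in input_io_iter(iter(chunks)):
        cache.add_chunk(None, chunk, None)

try:
    try:
        process_one_file(FailingCache(), [b'data'])
    except InputOSError as e:
        print('E %s' % e)  # only input-file errors become a per-file warning
except OSError as e:
    print('fatal: %s' % e)  # repository-side errors now propagate unmasked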