Mirror of https://github.com/borgbackup/borg.git
suppress unneeded exception context (PEP 409)
This commit is contained in:
parent a6f9c29dfe
commit fc52101d46

9 changed files with 12 additions and 12 deletions
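
Background on the change: PEP 409 allows a raise statement inside an except block to suppress the implicit exception context by adding "from None", so the traceback shows only the exception the caller should see, instead of also printing the internal one and the line "During handling of the above exception, another exception occurred:". Below is a minimal, self-contained sketch of the pattern applied throughout this commit; the CacheError class and load_cache_config function are hypothetical illustrations, not borg code.

# Hypothetical example (not borg code): translate a low-level KeyError into a
# friendlier error and suppress the implicit exception context per PEP 409.

class CacheError(Exception):
    pass


def load_cache_config(config):
    try:
        return config['cache']
    except KeyError:
        # Without "from None" the traceback would also show the KeyError and
        # "During handling of the above exception, another exception occurred:".
        raise CacheError('config does not look like a Borg cache') from None


if __name__ == '__main__':
    load_cache_config({})  # traceback now shows only CacheError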
@@ -292,7 +292,7 @@ def extract_item(self, item, restore_attrs=True, dry_run=False, stdout=False, sp
             else:
                 os.unlink(path)
         except UnicodeEncodeError:
-            raise self.IncompatibleFilesystemEncodingError(path, sys.getfilesystemencoding())
+            raise self.IncompatibleFilesystemEncodingError(path, sys.getfilesystemencoding()) from None
         except OSError:
             pass
         mode = item[b'mode']

@@ -332,7 +332,7 @@ def extract_item(self, item, restore_attrs=True, dry_run=False, stdout=False, sp
             try:
                 os.symlink(source, path)
             except UnicodeEncodeError:
-                raise self.IncompatibleFilesystemEncodingError(source, sys.getfilesystemencoding())
+                raise self.IncompatibleFilesystemEncodingError(source, sys.getfilesystemencoding()) from None
             self.restore_attrs(path, item, symlink=True)
         elif stat.S_ISFIFO(mode):
             if not os.path.exists(os.path.dirname(path)):

@@ -136,7 +136,7 @@ def _do_open(self):
                 raise Exception('%s has unexpected cache version %d (wanted: %d).' % (
                     config_path, cache_version, wanted_version))
         except configparser.NoSectionError as e:
-            raise Exception('%s does not look like a Borg cache.' % config_path)
+            raise Exception('%s does not look like a Borg cache.' % config_path) from None
         self.id = self.config.get('cache', 'repository')
         self.manifest_id = unhexlify(self.config.get('cache', 'manifest'))
         self.timestamp = self.config.get('cache', 'timestamp', fallback=None)

@@ -173,7 +173,7 @@ def getxattr(self, inode, name):
         try:
             return item.get(b'xattrs', {})[name]
         except KeyError:
-            raise llfuse.FUSEError(errno.ENODATA)
+            raise llfuse.FUSEError(errno.ENODATA) from None
 
     def _load_pending_archive(self, inode):
         # Check if this is an archive we need to load

@@ -777,7 +777,7 @@ def validator(text):
         try:
             loc = Location(text)
         except ValueError:
-            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text)
+            raise argparse.ArgumentTypeError('Invalid location format: "%s"' % text) from None
         if archive is True and not loc.archive:
             raise argparse.ArgumentTypeError('"%s": No archive specified' % text)
         elif archive is False and loc.archive:

@@ -390,7 +390,7 @@ def find_key(self):
             self.repository.load_key()
             return loc
         except configparser.NoOptionError:
-            raise RepoKeyNotFoundError(loc)
+            raise RepoKeyNotFoundError(loc) from None
 
     def get_new_target(self, args):
         return self.repository

@@ -138,7 +138,7 @@ def acquire(self, timeout=None, sleep=None):
                 if timer.timed_out_or_sleep():
                     raise LockTimeout(self.path)
             except OSError as err:
-                raise LockFailed(self.path, str(err))
+                raise LockFailed(self.path, str(err)) from None
             else:
                 with open(self.unique_name, "wb"):
                     pass

@@ -159,7 +159,7 @@ def __init__(self, location, create=False, lock_wait=None, lock=True, args=None)
         try:
             version = self.call('negotiate', RPC_PROTOCOL_VERSION)
         except ConnectionClosed:
-            raise ConnectionClosedWithHint('Is borg working on the server?')
+            raise ConnectionClosedWithHint('Is borg working on the server?') from None
         if version != RPC_PROTOCOL_VERSION:
             raise Exception('Server insisted on using unsupported protocol version %d' % version)
         self.id = self.call('open', location.path, create, lock_wait, lock)

@@ -417,7 +417,7 @@ def get(self, id_):
             segment, offset = self.index[id_]
             return self.io.read(segment, offset, id_)
         except KeyError:
-            raise self.ObjectNotFound(id_, self.path)
+            raise self.ObjectNotFound(id_, self.path) from None
 
     def get_many(self, ids, is_preloaded=False):
         for id_ in ids:

@@ -446,7 +446,7 @@ def delete(self, id, wait=True):
         try:
             segment, offset = self.index.pop(id)
         except KeyError:
-            raise self.ObjectNotFound(id, self.path)
+            raise self.ObjectNotFound(id, self.path) from None
         self.segments[segment] -= 1
         self.compact.add(segment)
         segment = self.io.write_delete(id)

@@ -628,7 +628,7 @@ def _read(self, fd, fmt, header, segment, offset, acceptable_tags):
             hdr_tuple = fmt.unpack(header)
         except struct.error as err:
             raise IntegrityError('Invalid segment entry header [segment {}, offset {}]: {}'.format(
-                segment, offset, err))
+                segment, offset, err)) from None
         if fmt is self.put_header_fmt:
             crc, size, tag, key = hdr_tuple
         elif fmt is self.header_fmt:

@@ -144,4 +144,4 @@ def __call__(self, prompt=None):
         try:
             return self.inputs.pop(0)
         except IndexError:
-            raise EOFError
+            raise EOFError from None