mirror of https://github.com/borgbackup/borg.git synced 2025-03-10 06:03:38 +00:00

Merge pull request #7550 from sashadev-sky/fix-argparse-errors-1.2

Fix argparse error messages
Commit 2181103c3f, authored by TW on 2023-05-08 15:39:11 +02:00 and committed via GitHub.
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
4 changed files with 40 additions and 30 deletions
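
The point of the change: when a callable passed as argparse's type= raises argparse.ArgumentTypeError, argparse prints the exception's message verbatim in its error output, whereas a bare ValueError only produces the generic "invalid ... value: '...'" line and the reason is lost. A minimal sketch of that behaviour, using a hypothetical level_spec validator (illustrative only, not borg code):

import argparse
from argparse import ArgumentTypeError

def level_spec(s):
    # stand-in for validators like CompressionSpec or ChunkerParams
    level = int(s)
    if not 0 <= level <= 9:
        # argparse reports: "demo: error: argument --level: level must be >= 0 and <= 9"
        raise ArgumentTypeError("level must be >= 0 and <= 9")
        # a bare ValueError here would only yield: "invalid level_spec value: '42'"
    return level

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--level", type=level_spec)
parser.parse_args(["--level", "42"])  # exits with the specific message above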

File 1 of 4:

@@ -15,6 +15,7 @@ which compressor has been used to compress the data and dispatch to the correct
 decompressor.
 """

+from argparse import ArgumentTypeError
 import random
 from struct import Struct
 import zlib
@@ -551,7 +552,7 @@ class CompressionSpec:
         values = s.split(',')
         count = len(values)
         if count < 1:
-            raise ValueError
+            raise ArgumentTypeError("not enough arguments")
         # --compression algo[,level]
         self.name = values[0]
         if self.name in ('none', 'lz4', ):
@@ -562,9 +563,9 @@ class CompressionSpec:
             elif count == 2:
                 level = int(values[1])
                 if not 0 <= level <= 9:
-                    raise ValueError
+                    raise ArgumentTypeError("level must be >= 0 and <= 9")
             else:
-                raise ValueError
+                raise ArgumentTypeError("too many arguments")
             self.level = level
         elif self.name in ('zstd', ):
             if count < 2:
@@ -572,28 +573,28 @@ class CompressionSpec:
             elif count == 2:
                 level = int(values[1])
                 if not 1 <= level <= 22:
-                    raise ValueError
+                    raise ArgumentTypeError("level must be >= 1 and <= 22")
             else:
-                raise ValueError
+                raise ArgumentTypeError("too many arguments")
             self.level = level
         elif self.name == 'auto':
             if 2 <= count <= 3:
                 compression = ','.join(values[1:])
             else:
-                raise ValueError
+                raise ArgumentTypeError("bad arguments")
             self.inner = CompressionSpec(compression)
         elif self.name == 'obfuscate':
             if 3 <= count <= 5:
                 level = int(values[1])
                 if not ((1 <= level <= 6) or (110 <= level <= 123)):
-                    raise ValueError
+                    raise ArgumentTypeError("level must be >= 1 and <= 6 or >= 110 and <= 123")
                 self.level = level
                 compression = ','.join(values[2:])
             else:
-                raise ValueError
+                raise ArgumentTypeError("bad arguments")
             self.inner = CompressionSpec(compression)
         else:
-            raise ValueError
+            raise ArgumentTypeError("unsupported compression type")

     @property
     def compressor(self):
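
Because borg passes these spec classes directly as argparse type= callables (e.g. for --compression), the messages above now reach the user as proper CLI errors. An illustrative sketch, assuming CompressionSpec is importable from borg.compress as in the 1.2 tree; the parser setup below is a stand-in, not borg's actual archiver code:

import argparse
from borg.compress import CompressionSpec  # assumed import path

parser = argparse.ArgumentParser(prog="borg-demo")
parser.add_argument("--compression", metavar="COMPRESSION", type=CompressionSpec,
                    default=CompressionSpec("lz4"))

# "zstd,99" is out of range, so parsing now fails with
#   borg-demo: error: argument --compression: level must be >= 1 and <= 22
# instead of the old generic "invalid CompressionSpec value: 'zstd,99'".
parser.parse_args(["--compression", "zstd,99"])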

File 2 of 4:

@@ -98,7 +98,7 @@ def ChunkerParams(s):
     params = s.strip().split(',')
     count = len(params)
     if count == 0:
-        raise ValueError('no chunker params given')
+        raise argparse.ArgumentTypeError('no chunker params given')
     algo = params[0].lower()
     if algo == CH_FIXED and 2 <= count <= 3:  # fixed, block_size[, header_size]
         block_size = int(params[1])
@@ -109,9 +109,11 @@ def ChunkerParams(s):
             # or in-memory chunk management.
             # choose the block (chunk) size wisely: if you have a lot of data and you cut
             # it into very small chunks, you are asking for trouble!
-            raise ValueError('block_size must not be less than 64 Bytes')
+            raise argparse.ArgumentTypeError('block_size must not be less than 64 Bytes')
         if block_size > MAX_DATA_SIZE or header_size > MAX_DATA_SIZE:
-            raise ValueError('block_size and header_size must not exceed MAX_DATA_SIZE [%d]' % MAX_DATA_SIZE)
+            raise argparse.ArgumentTypeError(
+                'block_size and header_size must not exceed MAX_DATA_SIZE [%d]' % MAX_DATA_SIZE
+            )
         return algo, block_size, header_size
     if algo == 'default' and count == 1:  # default
         return CHUNKER_PARAMS
@@ -119,14 +121,18 @@ def ChunkerParams(s):
     if algo == CH_BUZHASH and count == 5 or count == 4:  # [buzhash, ]chunk_min, chunk_max, chunk_mask, window_size
         chunk_min, chunk_max, chunk_mask, window_size = (int(p) for p in params[count - 4:])
         if not (chunk_min <= chunk_mask <= chunk_max):
-            raise ValueError('required: chunk_min <= chunk_mask <= chunk_max')
+            raise argparse.ArgumentTypeError('required: chunk_min <= chunk_mask <= chunk_max')
         if chunk_min < 6:
             # see comment in 'fixed' algo check
-            raise ValueError('min. chunk size exponent must not be less than 6 (2^6 = 64B min. chunk size)')
+            raise argparse.ArgumentTypeError(
+                'min. chunk size exponent must not be less than 6 (2^6 = 64B min. chunk size)'
+            )
         if chunk_max > 23:
-            raise ValueError('max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)')
+            raise argparse.ArgumentTypeError(
+                'max. chunk size exponent must not be more than 23 (2^23 = 8MiB max. chunk size)'
+            )
         return CH_BUZHASH, chunk_min, chunk_max, chunk_mask, window_size
-    raise ValueError('invalid chunker params')
+    raise argparse.ArgumentTypeError('invalid chunker params')


 def FilesCacheMode(s):
@@ -134,11 +140,13 @@ def FilesCacheMode(s):
     VALID_MODES = ('cis', 'ims', 'cs', 'ms', 'cr', 'mr', 'd', 's')  # letters in alpha order
     entries = set(s.strip().split(','))
     if not entries <= set(ENTRIES_MAP):
-        raise ValueError('cache mode must be a comma-separated list of: %s' % ','.join(sorted(ENTRIES_MAP)))
+        raise argparse.ArgumentTypeError(
+            'cache mode must be a comma-separated list of: %s' % ','.join(sorted(ENTRIES_MAP))
+        )
     short_entries = {ENTRIES_MAP[entry] for entry in entries}
     mode = ''.join(sorted(short_entries))
     if mode not in VALID_MODES:
-        raise ValueError('cache mode short must be one of: %s' % ','.join(VALID_MODES))
+        raise argparse.ArgumentTypeError('cache mode short must be one of: %s' % ','.join(VALID_MODES))
     return mode
@@ -219,7 +227,7 @@ def SortBySpec(text):
     from .manifest import AI_HUMAN_SORT_KEYS
     for token in text.split(','):
         if token not in AI_HUMAN_SORT_KEYS:
-            raise ValueError('Invalid sort key: %s' % token)
+            raise argparse.ArgumentTypeError('Invalid sort key: %s' % token)
     return text.replace('timestamp', 'ts')

File 3 of 4:

@@ -1,3 +1,4 @@
+import argparse
 import os
 import zlib
 try:
@@ -198,7 +199,7 @@ def test_obfuscate():


 def test_compression_specs():
-    with pytest.raises(ValueError):
+    with pytest.raises(argparse.ArgumentTypeError):
         CompressionSpec('')

     assert isinstance(CompressionSpec('none').compressor, CNONE)
@@ -213,7 +214,7 @@ def test_compression_specs():
     zlib = CompressionSpec('zlib,9').compressor
     assert isinstance(zlib, ZLIB)
     assert zlib.level == 9
-    with pytest.raises(ValueError):
+    with pytest.raises(argparse.ArgumentTypeError):
         CompressionSpec('zlib,9,invalid')

     lzma = CompressionSpec('lzma').compressor
@@ -236,7 +237,7 @@ def test_compression_specs():
     assert isinstance(zstd, ZSTD)
     assert zstd.level == 22
-    with pytest.raises(ValueError):
+    with pytest.raises(argparse.ArgumentTypeError):
         CompressionSpec('lzma,9,invalid')

-    with pytest.raises(ValueError):
+    with pytest.raises(argparse.ArgumentTypeError):
         CompressionSpec('invalid')

File 4 of 4:

@@ -348,19 +348,19 @@ def test_chunkerparams():
     assert ChunkerParams('fixed,4096') == ('fixed', 4096, 0)
     assert ChunkerParams('fixed,4096,200') == ('fixed', 4096, 200)
     # invalid values checking
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('crap,1,2,3,4')  # invalid algo
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('buzhash,5,7,6,4095')  # too small min. size
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('buzhash,19,24,21,4095')  # too big max. size
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('buzhash,23,19,21,4095')  # violates min <= mask <= max
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('fixed,63')  # too small block size
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('fixed,%d,%d' % (MAX_DATA_SIZE + 1, 4096))  # too big block size
-    with pytest.raises(ValueError):
+    with pytest.raises(ArgumentTypeError):
         ChunkerParams('fixed,%d,%d' % (4096, MAX_DATA_SIZE + 1))  # too big header size