mirror of https://github.com/borgbackup/borg.git
fix cyclic import issue, fix tests
Needed to increase the ChunkBuffer size (due to the increased items stream chunk size) to get the test working.
parent 8be6761c26
commit 42b6a838da
3 changed files with 3 additions and 3 deletions
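For context on the commit title: a cyclic import typically shows up when two modules in a package import names from each other at module level, so one of them is asked for a name before it has finished initializing. A minimal illustration with hypothetical module names (loosely modeled on the modules touched in this diff, not borg's actual layout):

# pkg/helpers_demo.py  (hypothetical)
from .compress_demo import get_compressor   # runs while helpers_demo is still initializing

def pack_chunk(data):
    return get_compressor('lz4').compress(data)


# pkg/compress_demo.py  (hypothetical)
from .helpers_demo import pack_chunk        # helpers_demo has not defined pack_chunk yet

def get_compressor(name):
    ...

# Importing pkg.helpers_demo then dies at import time with an ImportError along the lines
# of "cannot import name 'pack_chunk'" (the exact wording depends on the Python version).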
@@ -169,7 +169,7 @@ def fetch_many(self, ids, is_preloaded=False):
 
 
 class ChunkBuffer:
-    BUFFER_SIZE = 1 * 1024 * 1024
+    BUFFER_SIZE = 8 * 1024 * 1024
 
     def __init__(self, key, chunker_params=ITEMS_CHUNKER_PARAMS):
         self.buffer = BytesIO()
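The BUFFER_SIZE bump above matches the commit message: packed item metadata accumulates in a BytesIO buffer and is periodically cut into chunks, so the flush threshold has to comfortably exceed the chunk size the items chunker now aims for, or the buffer reports itself full before a full-sized chunk can accumulate. A rough sketch of that relationship, with a simplified fixed-size chunker and made-up numbers (not borg's actual ChunkBuffer or chunker):

from io import BytesIO

class TinyChunkBuffer:
    # Illustrative stand-in for a ChunkBuffer-style accumulator.
    def __init__(self, buffer_size, target_chunk_size):
        self.buffer = BytesIO()
        self.buffer_size = buffer_size              # flush threshold, cf. BUFFER_SIZE
        self.target_chunk_size = target_chunk_size  # size the items chunker aims for
        self.chunks = []

    def add(self, packed_item):
        self.buffer.write(packed_item)

    def is_full(self):
        # Callers flush once this returns True.
        return self.buffer.tell() > self.buffer_size

    def flush(self):
        data = self.buffer.getvalue()
        self.chunks += [data[i:i + self.target_chunk_size]
                        for i in range(0, len(data), self.target_chunk_size)]
        self.buffer = BytesIO()

# 2 MiB of packed items against the old and the new threshold:
for threshold in (1 * 1024 * 1024, 8 * 1024 * 1024):
    buf = TinyChunkBuffer(buffer_size=threshold, target_chunk_size=4 * 1024 * 1024)
    buf.add(b"x" * (2 * 1024 * 1024))
    print(threshold >> 20, "MiB threshold -> full:", buf.is_full())
# 1 MiB threshold -> full: True   (would flush before a full-sized chunk can form)
# 8 MiB threshold -> full: False  (keeps accumulating toward the target chunk size)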
@@ -39,7 +39,6 @@
 from . import hashindex
 from . import shellpattern
 from .constants import *  # NOQA
-from .compress import get_compressor
 
 # meta dict, data bytes
 _Chunk = namedtuple('_Chunk', 'meta data')
@@ -1584,6 +1583,7 @@ def decide(self, chunk):
         return compr_spec, chunk
 
     def heuristic_lz4(self, compr_args, chunk):
+        from .compress import get_compressor
         meta, data = chunk
         lz4 = get_compressor('lz4')
         cdata = lz4.compress(data)
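The hunk above is the actual fix: the module-level import removed in the previous hunk reappears as a function-local import inside heuristic_lz4, so it is resolved at call time, after both modules have finished initializing, which breaks the cycle. In the hypothetical two-module sketch from above the same pattern would look like:

# pkg/helpers_demo.py  (hypothetical, continuing the sketch further up)
# No module-level import of compress_demo any more -- the cycle is gone.

def pack_chunk(data):
    from .compress_demo import get_compressor   # resolved at call time, not import time
    return get_compressor('lz4').compress(data)

The cost is negligible: after the first call the imported module sits in sys.modules, so the per-call import is essentially a dictionary lookup; the only downside is that the dependency is no longer visible at the top of the file.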
@@ -109,7 +109,7 @@ def test(self):
         self.assert_equal(data, [Item(internal_dict=d) for d in unpacker])
 
     def test_partial(self):
-        big = "0123456789" * 10000
+        big = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
         data = [Item(path='full', source=big), Item(path='partial', source=big)]
         cache = MockCache()
         key = PlaintextKey(None)
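The test change is plain size arithmetic: with the items stream now cut into larger chunks, the old 100 kB string presumably no longer guarantees that the packed items span more than one chunk, which test_partial needs in order to leave a partial chunk behind after a non-final flush. The new string is nine times larger:

old = "0123456789" * 10000
new = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
print(len(old))   # 100000 -> roughly 0.1 MB per copy of the string
print(len(new))   # 900000 -> roughly 0.9 MB per copy

Since both test items use the string as their source, the packed stream carries two copies of it, which is presumably enough to span several chunks even at the larger items chunk size.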