msgpack: increase max_buffer_size to ~4GiB
As borg now uses repository.store_load and .store_save to load and save the chunks cache, we need a rather high limit here. This is a quick fix; the real fix might be to process the data in chunks (preferably <= MAX_OBJECT_SIZE), so there is less to unpack at once.
This commit is contained in:
parent 86c889cfeb
commit e251162998

1 changed file with 1 addition and 1 deletion
@@ -218,7 +218,7 @@ def get_limited_unpacker(kind):
     # unpack(data) or from max_buffer_size for Unpacker(max_buffer_size=N).
     args = dict(use_list=False, max_buffer_size=3 * max(BUFSIZE, MAX_OBJECT_SIZE))  # return tuples, not lists
     if kind in ("server", "client"):
-        pass  # nothing special
+        args.update(dict(max_buffer_size=0))  # 0 means "maximum" here, ~4GiB - needed for store_load/save
     elif kind in ("manifest", "archive", "key"):
         args.update(dict(use_list=True, object_hook=StableDict))  # default value
     else:
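
For context, a minimal sketch of what the max_buffer_size limit does in msgpack-python (assuming msgpack-python >= 1.0; the payload and sizes below are illustrative, not borg's real data): feeding more bytes than the limit raises BufferFull, while max_buffer_size=0 is documented to mean the maximum, 2**32 - 1 (~4 GiB).

import msgpack

# ~4 KiB payload, packed; large enough to overflow the small limit below.
payload = msgpack.packb(b"x" * 4096)

# A small limit rejects oversized input as soon as it is fed in:
small = msgpack.Unpacker(use_list=False, max_buffer_size=1024)
try:
    small.feed(payload)
except msgpack.exceptions.BufferFull:
    print("BufferFull: payload exceeds max_buffer_size")

# max_buffer_size=0 means "maximum" (2**32 - 1, ~4 GiB) - the behavior
# this commit relies on for large store_load/store_save payloads:
big = msgpack.Unpacker(use_list=False, max_buffer_size=0)
big.feed(payload)
assert next(big) == b"x" * 4096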
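
The "real fix" mentioned in the commit message is only sketched, not implemented here. One hedged possibility (the helper names and the constant value below are hypothetical, not borg's API) is to pack the data as a stream of independently unpackable pieces no larger than MAX_OBJECT_SIZE, so the unpacker never needs more than one piece in its buffer:

import msgpack

MAX_OBJECT_SIZE = 20 * 1024 * 1024  # illustrative value; borg defines its own constant

def pack_chunked(data, limit=MAX_OBJECT_SIZE):
    # Yield independently unpackable msgpack objects, each carrying
    # at most `limit` bytes of payload.
    for i in range(0, len(data), limit):
        yield msgpack.packb(data[i:i + limit])

def unpack_chunked(pieces, limit=MAX_OBJECT_SIZE):
    # A small, fixed buffer suffices: each piece is at most `limit`
    # bytes plus a few bytes of msgpack bin header.
    unpacker = msgpack.Unpacker(use_list=False, max_buffer_size=limit + 16)
    parts = []
    for piece in pieces:
        unpacker.feed(piece)
        parts.extend(unpacker)
    return b"".join(parts)

# Round trip:
data = b"y" * (2 * MAX_OBJECT_SIZE + 123)
assert unpack_chunked(pack_chunked(data)) == data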