Merge pull request #2171 from ThomasWaldmann/limit-unpacker

borg rpc: use limited msgpack.Unpacker
enkore 2017-02-21 21:53:09 +01:00 committed by GitHub
commit cd1df96ef5
5 changed files with 32 additions and 10 deletions

@@ -248,8 +248,8 @@ denial of repository service.
The situation where a server can create a general DoS on the client
should be avoided, but might be possible by e.g. forcing the client to
allocate large amounts of memory to decode large messages (or messages
-that merely indicate a large amount of data follows). See issue
-:issue:`2139` for details.
+that merely indicate a large amount of data follows). The RPC protocol
+code uses a limited msgpack Unpacker to prohibit this.
We believe that other kinds of attacks, especially critical vulnerabilities
like remote code execution are inhibited by the design of the protocol:
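
To make the mitigation concrete: msgpack-python's Unpacker accepts hard caps (max_buffer_size, max_str_len, max_array_len, max_map_len), and input exceeding them raises an error instead of forcing a huge allocation. The snippet below is only an illustrative sketch, not the borg code; decode_untrusted, handle and the limit values are invented for the example, and it assumes a msgpack-python version that supports these limit keywords.

import msgpack
from msgpack.exceptions import BufferFull

def decode_untrusted(untrusted_bytes, handle):
    """Decode msgpack data from an untrusted peer under hard limits (sketch)."""
    unpacker = msgpack.Unpacker(use_list=False,
                                max_buffer_size=16 * 1024 * 1024,  # cap on buffered, not yet decoded bytes
                                max_str_len=16 * 1024 * 1024,      # cap on any single str/bin object
                                max_array_len=10000,               # cap on array lengths
                                max_map_len=100)                   # cap on map sizes
    try:
        unpacker.feed(untrusted_bytes)   # raises BufferFull if more than max_buffer_size is pending
        for obj in unpacker:             # raises ValueError-based errors if an object exceeds a limit
            handle(obj)
    except (BufferFull, ValueError) as exc:
        raise ConnectionError('peer sent oversized msgpack data') from exc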

@@ -39,7 +39,7 @@ from .item import Item, ArchiveItem
from .key import key_factory
from .platform import acl_get, acl_set, set_flags, get_flags, swidth
from .remote import cache_if_remote
-from .repository import Repository
+from .repository import Repository, LIST_SCAN_LIMIT
has_lchmod = hasattr(os, 'lchmod')
@@ -1060,7 +1060,7 @@ class ArchiveChecker:
self.chunks = ChunkIndex(capacity)
marker = None
while True:
-result = self.repository.list(limit=10000, marker=marker)
+result = self.repository.list(limit=LIST_SCAN_LIMIT, marker=marker)
if not result:
break
marker = result[-1]
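
Both changed call sites (here in ArchiveChecker, and the same loop in Archiver below) follow one marker-based pagination pattern. Roughly, as a standalone sketch (iter_all_ids is illustrative and not part of the change):

LIST_SCAN_LIMIT = 10000  # same value the repository module defines for this purpose

def iter_all_ids(repository):
    # Yield every object id, fetching at most LIST_SCAN_LIMIT ids per round trip,
    # so each reply also stays within the client unpacker's max_array_len.
    marker = None
    while True:
        result = repository.list(limit=LIST_SCAN_LIMIT, marker=marker)
        if not result:
            break
        yield from result
        marker = result[-1]  # resume after the last id already seen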

@@ -57,7 +57,7 @@ from .key import key_creator, tam_required_file, tam_required, RepoKey, Passphra
from .keymanager import KeyManager
from .platform import get_flags, umount, get_process_id
from .remote import RepositoryServer, RemoteRepository, cache_if_remote
-from .repository import Repository
+from .repository import Repository, LIST_SCAN_LIMIT
from .selftest import selftest
from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
@@ -1305,7 +1305,7 @@ class Archiver:
marker = None
i = 0
while True:
-result = repository.list(limit=10000, marker=marker)
+result = repository.list(limit=LIST_SCAN_LIMIT, marker=marker)
if not result:
break
marker = result[-1]

@@ -23,7 +23,7 @@ from .helpers import sysinfo
from .helpers import bin_to_hex
from .helpers import replace_placeholders
from .helpers import yes
-from .repository import Repository
+from .repository import Repository, MAX_OBJECT_SIZE, LIST_SCAN_LIMIT
from .version import parse_version, format_version
from .logger import create_logger
@@ -57,6 +57,27 @@ def os_write(fd, data):
return amount
+def get_limited_unpacker(kind):
+    """return a limited Unpacker because we should not trust msgpack data received from remote"""
+    args = dict(use_list=False,  # return tuples, not lists
+                max_bin_len=0,  # not used
+                max_ext_len=0,  # not used
+                max_buffer_size=3 * max(BUFSIZE, MAX_OBJECT_SIZE),
+                max_str_len=MAX_OBJECT_SIZE,  # a chunk or other repo object
+                )
+    if kind == 'server':
+        args.update(dict(max_array_len=100,  # misc. cmd tuples
+                         max_map_len=100,  # misc. cmd dicts
+                         ))
+    elif kind == 'client':
+        args.update(dict(max_array_len=LIST_SCAN_LIMIT,  # result list from repo.list() / .scan()
+                         max_map_len=100,  # misc. result dicts
+                         ))
+    else:
+        raise ValueError('kind must be "server" or "client"')
+    return msgpack.Unpacker(**args)
class ConnectionClosed(Error):
"""Connection closed by remote host"""
@@ -185,7 +206,7 @@ class RepositoryServer: # pragma: no cover
# Make stderr blocking
fl = fcntl.fcntl(stderr_fd, fcntl.F_GETFL)
fcntl.fcntl(stderr_fd, fcntl.F_SETFL, fl & ~os.O_NONBLOCK)
-unpacker = msgpack.Unpacker(use_list=False)
+unpacker = get_limited_unpacker('server')
while True:
r, w, es = select.select([stdin_fd], [], [], 10)
if r:
@@ -487,8 +508,7 @@ class RemoteRepository:
self.ignore_responses = set()
self.responses = {}
self.ratelimit = SleepingBandwidthLimiter(args.remote_ratelimit * 1024 if args and args.remote_ratelimit else 0)
-self.unpacker = msgpack.Unpacker(use_list=False)
+self.unpacker = get_limited_unpacker('client')
self.server_version = parse_version('1.0.8') # fallback version if server is too old to send version information
self.p = None
testing = location.host == '__testsuite__'
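
For orientation, the streaming pattern used with such an unpacker looks roughly like the sketch below. It assumes borg.remote's names (get_limited_unpacker, ConnectionClosed) are in scope; serve_requests, dispatch and the read size are invented for the example, and this is not the exact borg serve loop.

import os

def serve_requests(stdin_fd, dispatch):
    # Read bounded chunks from the peer, feed them to the limited unpacker and
    # iterate out complete messages; oversized data raises instead of allocating.
    unpacker = get_limited_unpacker('server')
    while True:
        data = os.read(stdin_fd, 65536)      # chunk size is arbitrary in this sketch
        if not data:
            raise ConnectionClosed()
        unpacker.feed(data)                  # raises if more than max_buffer_size piles up
        for unpacked in unpacker:            # each item was decoded under the configured limits
            dispatch(unpacked)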

@@ -33,6 +33,8 @@ TAG_PUT = 0
TAG_DELETE = 1
TAG_COMMIT = 2
+LIST_SCAN_LIMIT = 10000  # repo.list() / .scan() result count limit the borg client uses
FreeSpace = partial(defaultdict, int)