mirror of
https://github.com/transmission/transmission
synced 2024-12-22 07:42:37 +00:00
fix: tr_variant_serde::parse_json()
bug fixes (#6901)
* perf: avoid unnecessary copying
* fix: set `tr_variant_serde::end_` in `parse_json()`
* test: `tr_variant_serde::end()`
* fix: compensate for innate read cursor offset of `rapidjson::AutoUTFInputStream`
* fix: stop parsing json after parsing a complete json root
  This matches the benc parser's behaviour
* fixup! fix: stop parsing json after parsing a complete json root
This commit is contained in:
parent
c21ee87eea
commit
fa8be1b981
2 changed files with 40 additions and 5 deletions
|
@@ -89,7 +89,7 @@ struct json_to_variant_handler : public rapidjson::BaseReaderHandler<>
|
|||
|
||||
/// rapidjson SAX callback for a string token.
/// @param str  pointer into the parse buffer (not NUL-terminated)
/// @param len  length of the token in code units
/// @param copy true when rapidjson's buffer is transient and the bytes
///             must be copied to outlive the parse
/// @return true to continue parsing
bool String(Ch const* const str, rapidjson::SizeType const len, bool const copy)
{
    // When a copy is required, build an owning tr_variant from a
    // string_view (avoids constructing an intermediate std::string);
    // otherwise keep a non-owning view into the caller-owned input.
    *get_leaf() = copy ? tr_variant{ std::string_view{ str, len } } : tr_variant::unmanaged_string({ str, len });
    return true;
}
|
||||
|
||||
|
@@ -216,7 +216,13 @@ std::optional&lt;tr_variant&gt; tr_variant_serde::parse_json(std::string_view input)
|
|||
auto ms = rapidjson::MemoryStream{ begin, size };
|
||||
auto eis = rapidjson::AutoUTFInputStream<unsigned, rapidjson::MemoryStream>{ ms };
|
||||
auto reader = rapidjson::GenericReader<rapidjson::AutoUTF<unsigned>, rapidjson::UTF8<char>>{};
|
||||
reader.Parse(eis, handler);
|
||||
reader.Parse<rapidjson::kParseStopWhenDoneFlag>(eis, handler);
|
||||
|
||||
// Due to the nature of how AutoUTFInputStream works, when AutoUTFInputStream
|
||||
// is used with MemoryStream, the read cursor position is always 1 ahead of
|
||||
// the current character (unless the end of stream is reached).
|
||||
auto const pos = eis.Peek() == '\0' ? eis.Tell() : eis.Tell() - 1U;
|
||||
end_ = begin + pos;
|
||||
|
||||
if (!reader.HasParseError())
|
||||
{
|
||||
|
@@ -229,13 +235,12 @@ std::optional&lt;tr_variant&gt; tr_variant_serde::parse_json(std::string_view input)
|
|||
}
|
||||
else
|
||||
{
|
||||
auto const err_offset = reader.GetErrorOffset();
|
||||
error_.set(
|
||||
EILSEQ,
|
||||
fmt::format(
|
||||
_("Couldn't parse JSON at position {position} '{text}': {error} ({error_code})"),
|
||||
fmt::arg("position", err_offset),
|
||||
fmt::arg("text", std::string_view{ begin + err_offset, std::min(size_t{ 16U }, size - err_offset) }),
|
||||
fmt::arg("position", pos),
|
||||
fmt::arg("text", std::string_view{ begin + pos, std::min(size_t{ 16U }, size - pos) }),
|
||||
fmt::arg("error", rapidjson::GetParseError_En(err_code)),
|
||||
fmt::arg("error_code", static_cast<std::underlying_type_t<decltype(err_code)>>(err_code))));
|
||||
}
|
||||
|
|
|
@@ -535,3 +535,33 @@ TEST_F(VariantTest, variantFromBufFuzz)
|
|||
(void)json_serde.inplace().parse(buf);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(VariantTest, serdeEnd)
{
    // Each case: input text, the character end() should point at after
    // parsing the first root, and end()'s expected byte offset from the
    // start of the input. '\0' marks "parsed the entire input".
    static auto constexpr TestsJson = std::array{
        std::tuple{ R"({ "json1": 1 }{ "json2": 2 })"sv, '{', 14U },
        std::tuple{ R"({ "json1": 1 })"sv, '\0', 14U },
    };
    static auto constexpr TestsBenc = std::array{
        std::tuple{ "d5:benc1i1eed5:benc2i2ee"sv, 'd', 12U },
        std::tuple{ "d5:benc1i1ee"sv, '\0', 12U },
    };

    for (auto const& [input, expected_char, expected_offset] : TestsJson)
    {
        auto serde = tr_variant_serde::json().inplace();
        auto parsed = serde.parse(input).value_or(tr_variant{});
        EXPECT_TRUE(parsed.holds_alternative<tr_variant::Map>()) << serde.error_;
        // end() must stop exactly after the first complete JSON root.
        EXPECT_EQ(*serde.end(), expected_char);
        EXPECT_EQ(serde.end() - std::data(input), expected_offset);
    }

    for (auto const& [input, expected_char, expected_offset] : TestsBenc)
    {
        auto serde = tr_variant_serde::benc().inplace();
        auto parsed = serde.parse(input).value_or(tr_variant{});
        EXPECT_TRUE(parsed.holds_alternative<tr_variant::Map>()) << serde.error_;
        // benc parsing has always stopped after one root; json now matches.
        EXPECT_EQ(*serde.end(), expected_char);
        EXPECT_EQ(serde.end() - std::data(input), expected_offset);
    }
}
|
||||
|
|
Loading…
Reference in a new issue