refactor: use OutputIterator for url-escaped info hash strings (#3672)

Charles Kerr 2022-08-18 14:49:20 -05:00 committed by GitHub
parent c171d6df7c
commit 479a16787e
4 changed files with 15 additions and 40 deletions
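
The change throughout is the same: instead of escaping the SHA-1 info hash into a fixed-size scratch buffer with tr_http_escape_sha1() and then copying that C string into the URL, callers now write the escaped bytes straight into the URL buffer through an OutputIterator. A condensed before/after sketch, pieced together from the hunks below (the announce builder actually uses named fmt::arg()s rather than the inline "{}" shown here):

// before: escape into a stack buffer, then interpolate the resulting C string
auto escaped_info_hash = std::array<char, SHA_DIGEST_LENGTH * 3 + 1>{};
tr_http_escape_sha1(std::data(escaped_info_hash), req->info_hash);
fmt::format_to(out, "&info_hash={}", std::data(escaped_info_hash));

// after: append the escaped digest directly through the output iterator
fmt::format_to(out, "&info_hash=");
tr_http_escape(out, req->info_hash);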

libtransmission/announcer-http.cc

@@ -56,14 +56,10 @@ static tr_urlbuf announce_url_new(tr_session const* session, tr_announce_request
auto url = tr_urlbuf{};
auto out = std::back_inserter(url);
-auto escaped_info_hash = std::array<char, SHA_DIGEST_LENGTH * 3 + 1>{};
-tr_http_escape_sha1(std::data(escaped_info_hash), req->info_hash);
fmt::format_to(
out,
"{url}"
"{sep}info_hash={info_hash}"
"&peer_id={peer_id}"
"{sep}peer_id={peer_id}"
"&port={port}"
"&uploaded={uploaded}"
"&downloaded={downloaded}"
@@ -74,7 +70,6 @@ static tr_urlbuf announce_url_new(tr_session const* session, tr_announce_request
"&supportcrypto=1",
fmt::arg("url", req->announce_url),
fmt::arg("sep", tr_strvContains(req->announce_url.sv(), '?') ? '&' : '?'),
fmt::arg("info_hash", std::data(escaped_info_hash)),
fmt::arg("peer_id", std::string_view{ std::data(req->peer_id), std::size(req->peer_id) }),
fmt::arg("port", req->port.host()),
fmt::arg("uploaded", req->up),
@@ -83,6 +78,9 @@ static tr_urlbuf announce_url_new(tr_session const* session, tr_announce_request
fmt::arg("numwant", req->numwant),
fmt::arg("key", req->key));
fmt::format_to(out, "&info_hash=");
tr_http_escape(out, req->info_hash);
if (session->encryptionMode() == TR_ENCRYPTION_REQUIRED)
{
fmt::format_to(out, "&requirecrypto=1");
@@ -652,12 +650,12 @@ static auto scrape_url_new(tr_scrape_request const* req)
char delimiter = tr_strvContains(sv, '?') ? '&' : '?';
auto scrape_url = tr_pathbuf{ sv };
+auto out = std::back_inserter(scrape_url);
for (int i = 0; i < req->info_hash_count; ++i)
{
-char str[SHA_DIGEST_LENGTH * 3 + 1];
-tr_http_escape_sha1(str, req->info_hash[i]);
-scrape_url.append(delimiter, "info_hash=", str);
+fmt::format_to(out, "{}info_hash=", delimiter);
+tr_http_escape(out, req->info_hash[i]);
delimiter = '&';
}
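
The scrape builder keeps its delimiter-flipping loop but now formats through the same iterator. A self-contained sketch of that pattern using std::string and hypothetical names (append_info_hashes is not a Transmission function), with the hashes assumed to be already percent-escaped:

#include <fmt/format.h>
#include <iterator>
#include <string>
#include <vector>

// append one "info_hash=" parameter per hash, using '?' before the first
// parameter and '&' before every subsequent one
std::string append_info_hashes(std::string url, std::vector<std::string> const& escaped_hashes)
{
    auto out = std::back_inserter(url);
    auto delimiter = url.find('?') != std::string::npos ? '&' : '?';
    for (auto const& hash : escaped_hashes)
    {
        fmt::format_to(out, "{}info_hash={}", delimiter, hash);
        delimiter = '&';
    }
    return url;
}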

libtransmission/web-utils.cc

@@ -172,29 +172,6 @@ char const* tr_webGetResponseStr(long code)
}
}
-static bool is_rfc2396_alnum(uint8_t ch)
-{
-return ('0' <= ch && ch <= '9') || ('A' <= ch && ch <= 'Z') || ('a' <= ch && ch <= 'z') || ch == '.' || ch == '-' ||
-ch == '_' || ch == '~';
-}
-void tr_http_escape_sha1(char* out, tr_sha1_digest_t const& digest)
-{
-for (auto const b : digest)
-{
-if (is_rfc2396_alnum(uint8_t(b)))
-{
-*out++ = (char)b;
-}
-else
-{
-out = fmt::format_to(out, FMT_STRING("%{:02x}"), unsigned(b));
-}
-}
-*out = '\0';
-}
//// URLs
namespace
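
For reference, the deleted helper passed RFC 2396 unreserved bytes (alphanumerics plus '.', '-', '_', '~') through untouched and turned every other byte into a lowercase percent-escaped pair, so three hypothetical digest bytes would come out as:

// { 0x41, 0x7e, 0x1f }  ->  "A", "~", "%1f"  ->  "A~%1f"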

libtransmission/web-utils.h

@@ -111,7 +111,11 @@ void tr_http_escape(OutputIt out, std::string_view in, bool escape_reserved)
}
}
-void tr_http_escape_sha1(char* out, tr_sha1_digest_t const& digest);
+template<typename OutputIt>
+void tr_http_escape(OutputIt out, tr_sha1_digest_t const& digest)
+{
+tr_http_escape(out, std::string_view{ reinterpret_cast<char const*>(digest.data()), std::size(digest) }, false);
+}
char const* tr_webGetResponseStr(long response_code);
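
A minimal usage sketch of the new overload, assuming tr_sha1_digest_t is the project's 20-byte digest array and using an all-zero digest purely for illustration:

auto const digest = tr_sha1_digest_t{}; // 20 zero bytes, illustration only
auto query = std::string{ "info_hash=" };
tr_http_escape(std::back_inserter(query), digest);
// query now holds "info_hash=" followed by twenty "%00" escapes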

utils/show.cc

@@ -8,6 +8,7 @@
#include <cinttypes> // PRIu64
#include <cstdio>
#include <ctime>
+#include <iterator>
#include <string>
#include <string_view>
@@ -339,14 +340,9 @@ void doScrape(tr_torrent_metainfo const& metainfo)
}
// build the full scrape URL
-auto escaped = std::array<char, TR_SHA1_DIGEST_LEN * 3 + 1>{};
-tr_http_escape_sha1(std::data(escaped), metainfo.infoHash());
auto const scrape = tracker.scrape.sv();
-auto const url = tr_urlbuf{ scrape,
-tr_strvContains(scrape, '?') ? '&' : '?',
-"info_hash="sv,
-std::string_view{ std::data(escaped) } };
+auto url = tr_urlbuf{ scrape, tr_strvContains(scrape, '?') ? '&' : '?', "info_hash="sv };
+tr_http_escape(std::back_inserter(url), metainfo.infoHash());
printf("%" TR_PRIsv " ... ", TR_PRIsv_ARG(url));
fflush(stdout);
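
For reference, the scratch arrays removed in this commit (here and in the announce/scrape builders above) were sized for the worst case in which every digest byte needs escaping:

// TR_SHA1_DIGEST_LEN * 3 + 1  =  20 bytes x 3 chars per "%xx" escape + 1 trailing NUL  =  61 chars
// Writing through std::back_inserter(url) appends as it goes, so no such bound (or NUL terminator) is needed.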