refactor: use std::string in tr_scrape_response (#1866)

Charles Kerr 2021-09-30 16:33:31 -05:00 committed by GitHub
parent 14fc626943
commit 3fd5c81a22
4 changed files with 21 additions and 25 deletions
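
The url and errmsg fields of tr_scrape_response were raw char* buffers: url was tr_strdup()'d on creation, errmsg was nullptr until a failure, and both had to be tr_free()'d on every cleanup path. Holding them as std::string lets the compiler-generated destructor reclaim the memory instead. A minimal sketch of the ownership change (simplified stand-ins, not the full struct):

    #include <string>

    // Before: caller-managed C strings. Every owner of the struct had
    // to remember to tr_free() both fields on every exit path.
    struct scrape_response_before
    {
        char* url;    // tr_strdup()'d copy of the scrape URL
        char* errmsg; // nullptr when there is no error
    };

    // After: std::string owns its buffer, so destroying the struct
    // frees the memory automatically; "no error" is the empty string.
    struct scrape_response_after
    {
        std::string url;
        std::string errmsg;
    };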

libtransmission/announcer-common.h

@@ -12,6 +12,8 @@
 #error only the libtransmission announcer module should #include this header.
 #endif
 
+#include <string>
+
 #include "transmission.h" /* SHA_DIGEST_LENGTH */
 #include "session.h" /* PEER_ID_LEN */
@@ -68,7 +70,7 @@ struct tr_scrape_response_row
     int downloaders;
 };
 
-typedef struct
+struct tr_scrape_response
 {
     /* whether or not we managed to connect to the tracker */
     bool did_connect;
@@ -83,15 +85,15 @@ typedef struct
     struct tr_scrape_response_row rows[TR_MULTISCRAPE_MAX];
 
     /* the raw scrape url */
-    char* url;
+    std::string url;
 
     /* human-readable error string on failure, or NULL */
-    char* errmsg;
+    std::string errmsg;
 
     /* minimum interval (in seconds) allowed between scrapes.
      * this is an unofficial extension that some trackers won't support. */
     int min_request_interval;
-} tr_scrape_response;
+};
 
 typedef void (*tr_scrape_response_func)(tr_scrape_response const* response, void* user_data);

libtransmission/announcer-http.cc

@@ -370,9 +370,7 @@ static void on_scrape_done_eventthread(void* vdata)
         data->response_func(&data->response, data->response_func_user_data);
     }
 
-    tr_free(data->response.errmsg);
-    tr_free(data->response.url);
-    tr_free(data);
+    delete data;
 }
 
 static void on_scrape_done(
@@ -389,7 +387,7 @@ static void on_scrape_done(
     tr_scrape_response* response = &data->response;
     response->did_connect = did_connect;
     response->did_timeout = did_timeout;
-    dbgmsg(data->log_name, "Got scrape response for \"%s\"", response->url);
+    dbgmsg(data->log_name, "Got scrape response for \"%s\"", response->url.c_str());
 
     if (response_code != HTTP_OK)
     {
@@ -514,11 +512,10 @@ void tr_tracker_http_scrape(
     tr_scrape_response_func response_func,
     void* response_func_user_data)
 {
-    struct scrape_data* d;
     char* url = scrape_url_new(request);
 
-    d = tr_new0(struct scrape_data, 1);
-    d->response.url = tr_strdup(request->url);
+    auto* d = new scrape_data{};
+    d->response.url = request->url;
     d->response_func = response_func;
     d->response_func_user_data = response_func_user_data;
     d->response.row_count = request->info_hash_count;
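
A note on the allocation change above: tr_new0() is a calloc-style helper, so it would neither run the constructors of the new std::string members nor (via tr_free()) their destructors. Pairing new scrape_data{} here with the delete data in on_scrape_done_eventthread() is what lets the strings clean themselves up. A minimal sketch, assuming a simplified scrape_data that carries only the response:

    #include <string>

    struct response_sketch // stand-in for tr_scrape_response
    {
        std::string url;
        std::string errmsg;
    };

    struct scrape_data_sketch // stand-in for the real scrape_data
    {
        response_sketch response;
    };

    void lifecycle()
    {
        // new{} value-initializes the struct and runs the std::string
        // constructors; calloc-style zero-filling would have left raw
        // bytes that are undefined behavior to use as strings.
        auto* d = new scrape_data_sketch{};
        d->response.url = "https://tracker.example/scrape";

        // delete runs the destructor chain, which releases the string
        // buffers; no per-field tr_free() calls are needed.
        delete d;
    }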

libtransmission/announcer-udp.cc

@@ -213,8 +213,6 @@ static struct tau_scrape_request* tau_scrape_request_new(
 
 static void tau_scrape_request_free(struct tau_scrape_request* req)
 {
-    tr_free(req->response.errmsg);
-    tr_free(req->response.url);
     delete req;
 }

libtransmission/announcer.cc

@@ -156,15 +156,14 @@ typedef struct tr_announcer
     time_t tauUpkeepAt;
 } tr_announcer;
 
-static struct tr_scrape_info* tr_announcerGetScrapeInfo(struct tr_announcer* announcer, char const* url)
+static struct tr_scrape_info* tr_announcerGetScrapeInfo(struct tr_announcer* announcer, std::string const& url)
 {
     struct tr_scrape_info* info = nullptr;
 
-    if (!tr_str_is_empty(url))
+    if (!std::empty(url))
     {
-        auto const urlstr = std::string{ url };
         auto& scrapes = announcer->scrape_info;
-        auto const it = scrapes.try_emplace(urlstr, urlstr, TR_MULTISCRAPE_MAX);
+        auto const it = scrapes.try_emplace(url, url, TR_MULTISCRAPE_MAX);
         info = &it.first->second;
     }
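
Taking url as std::string const& also removes the urlstr temporary: the caller's string is passed straight to std::map::try_emplace(), which only constructs a tr_scrape_info when the key is absent. A minimal sketch of that lookup-or-create pattern, with a placeholder value type standing in for tr_scrape_info:

    #include <map>
    #include <string>

    struct scrape_info_sketch // placeholder for tr_scrape_info
    {
        std::string url;
        int multiscrape;

        scrape_info_sketch(std::string url_in, int multiscrape_in)
            : url{ std::move(url_in) }
            , multiscrape{ multiscrape_in }
        {
        }
    };

    using scrape_map = std::map<std::string, scrape_info_sketch>;

    scrape_info_sketch* get_or_create(scrape_map& scrapes, std::string const& url)
    {
        // try_emplace() does nothing beyond the lookup when the key is
        // already present, so the value is built once per tracker.
        // (60 is a hypothetical stand-in for TR_MULTISCRAPE_MAX.)
        auto const it = scrapes.try_emplace(url, url, 60);
        return &it.first->second;
    }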
@@ -1362,7 +1361,7 @@ static void on_scrape_error(tr_session const* session, tr_tier* tier, char const
     tier->scrapeAt = get_next_scrape_time(session, tier, interval);
 }
 
-static tr_tier* find_tier(tr_torrent* tor, char const* scrape)
+static tr_tier* find_tier(tr_torrent* tor, std::string const& scrape)
 {
     struct tr_torrent_tiers* tt = tor->tiers;
@@ -1407,7 +1406,7 @@ static void on_scrape_done(tr_scrape_response const* response, void* vsession)
                 "downloaders:%d "
                 "min_request_interval:%d "
                 "err:%s ",
-                response->url,
+                response->url.c_str(),
                 (int)response->did_connect,
                 (int)response->did_timeout,
                 row->seeders,
@@ -1415,7 +1414,7 @@ static void on_scrape_done(tr_scrape_response const* response, void* vsession)
                 row->downloads,
                 row->downloaders,
                 response->min_request_interval,
-                response->errmsg != nullptr ? response->errmsg : "none");
+                std::empty(response->errmsg) ? "none" : response->errmsg.c_str());
 
             tier->isScraping = false;
             tier->lastScrapeTime = now;
@@ -1430,9 +1429,9 @@ static void on_scrape_done(tr_scrape_response const* response, void* vsession)
             {
                 on_scrape_error(session, tier, _("Tracker did not respond"));
             }
-            else if (response->errmsg != nullptr)
+            else if (!std::empty(response->errmsg))
             {
-                on_scrape_error(session, tier, response->errmsg);
+                on_scrape_error(session, tier, response->errmsg.c_str());
             }
             else
             {
@@ -1473,9 +1472,9 @@ static void on_scrape_done(tr_scrape_response const* response, void* vsession)
     }
 
     /* Maybe reduce the number of torrents in a multiscrape req */
-    if (multiscrape_too_big(response->errmsg))
+    if (multiscrape_too_big(response->errmsg.c_str()))
     {
-        char const* url = response->url;
+        auto const& url = response->url;
         struct tr_scrape_info* const scrape_info = tr_announcerGetScrapeInfo(announcer, url);
 
         if (scrape_info != nullptr)
         {
@@ -1492,7 +1491,7 @@ static void on_scrape_done(tr_scrape_response const* response, void* vsession)
             char* scheme = nullptr;
             char* host = nullptr;
             int port;
-            if (tr_urlParse(url, strlen(url), &scheme, &host, &port, nullptr))
+            if (tr_urlParse(std::data(url), std::size(url), &scheme, &host, &port, nullptr))
             {
                 /* don't log the full URL, since that might have a personal announce id */
                 char* sanitized_url = tr_strdup_printf("%s://%s:%d", scheme, host, port);
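
One last detail from the hunk above: std::data() and std::size() (C++17, declared in <iterator>) are the free-function spellings of url.data() and url.size(), so the std::string feeds tr_urlParse()'s pointer-plus-length interface without the old strlen() call. A quick illustration against a generic C-style parser:

    #include <cstddef>
    #include <iterator>
    #include <string>

    // stand-in for any (char const*, length) C-style interface
    bool parse_sketch(char const* text, std::size_t len)
    {
        return text != nullptr && len > 0; // trivial placeholder body
    }

    void demo()
    {
        auto const url = std::string{ "https://tracker.example/scrape" };

        // std::size(url) is O(1) and matches url.size(), whereas
        // strlen() would rescan the buffer and stop at any embedded NUL.
        parse_sketch(std::data(url), std::size(url));
    }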