mirror of
https://github.com/Radarr/Radarr
synced 2025-01-01 04:45:35 +00:00
When running under mono, WebClient will sometimes return less data
than it should. This causes the FetchFeedService to log errors because the XML received cannot be parsed. Setting the AutomaticDecompression property on the WebRequest fixes this issue.
This commit is contained in:
parent
75236118dd
commit
be0d779448
1 changed file with 12 additions and 2 deletions
|
@@ -22,6 +22,16 @@ public interface IHttpProvider
|
|||
|
||||
public class HttpProvider : IHttpProvider
|
||||
{
|
||||
/// <summary>
/// A <see cref="WebClient"/> that enables automatic GZip/Deflate decompression
/// on every request it creates. Works around mono's WebClient sometimes
/// returning less data than it should, which broke XML feed parsing.
/// </summary>
private class GZipWebClient : WebClient
{
    /// <summary>
    /// Builds the underlying request and turns on automatic decompression
    /// before it is used.
    /// </summary>
    /// <param name="address">The URI the request targets.</param>
    /// <returns>The configured <see cref="WebRequest"/>.</returns>
    protected override WebRequest GetWebRequest(Uri address)
    {
        var webRequest = (HttpWebRequest)base.GetWebRequest(address);
        webRequest.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;

        return webRequest;
    }
}
|
||||
|
||||
private readonly Logger _logger;
|
||||
|
||||
public const string CONTENT_LENGTH_HEADER = "Content-Length";
|
||||
|
@@ -49,7 +59,7 @@ public string DownloadString(string url, ICredentials identity)
|
|||
{
|
||||
try
|
||||
{
|
||||
var client = new WebClient { Credentials = identity };
|
||||
var client = new GZipWebClient { Credentials = identity };
|
||||
client.Headers.Add(HttpRequestHeader.UserAgent, _userAgent);
|
||||
return client.DownloadString(url);
|
||||
}
|
||||
|
@@ -107,7 +117,7 @@ public void DownloadFile(string url, string fileName)
|
|||
_logger.Debug("Downloading [{0}] to [{1}]", url, fileName);
|
||||
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
var webClient = new WebClient();
|
||||
var webClient = new GZipWebClient();
|
||||
webClient.Headers.Add(HttpRequestHeader.UserAgent, _userAgent);
|
||||
webClient.DownloadFile(url, fileName);
|
||||
stopWatch.Stop();
|
||||
|
|
Loading…
Reference in a new issue