mirror of
https://github.com/Sonarr/Sonarr
synced 2024-12-30 19:56:54 +00:00
When running under Mono, WebClient will sometimes return less data
than it should. This causes the FetchFeedService to log errors because the XML received cannot be parsed. Setting the AutomaticDecompression property on the WebRequest fixes this issue.
This commit is contained in:
parent
75236118dd
commit
be0d779448
1 changed file with 12 additions and 2 deletions
|
@ -22,6 +22,16 @@ namespace NzbDrone.Common.Http
|
|||
|
||||
public class HttpProvider : IHttpProvider
|
||||
{
|
||||
private class GZipWebClient : WebClient
|
||||
{
|
||||
protected override WebRequest GetWebRequest(Uri address)
|
||||
{
|
||||
HttpWebRequest request = (HttpWebRequest)base.GetWebRequest(address);
|
||||
request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
|
||||
return request;
|
||||
}
|
||||
}
|
||||
|
||||
private readonly Logger _logger;
|
||||
|
||||
public const string CONTENT_LENGTH_HEADER = "Content-Length";
|
||||
|
@ -49,7 +59,7 @@ namespace NzbDrone.Common.Http
|
|||
{
|
||||
try
|
||||
{
|
||||
var client = new WebClient { Credentials = identity };
|
||||
var client = new GZipWebClient { Credentials = identity };
|
||||
client.Headers.Add(HttpRequestHeader.UserAgent, _userAgent);
|
||||
return client.DownloadString(url);
|
||||
}
|
||||
|
@ -107,7 +117,7 @@ namespace NzbDrone.Common.Http
|
|||
_logger.Debug("Downloading [{0}] to [{1}]", url, fileName);
|
||||
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
var webClient = new WebClient();
|
||||
var webClient = new GZipWebClient();
|
||||
webClient.Headers.Add(HttpRequestHeader.UserAgent, _userAgent);
|
||||
webClient.DownloadFile(url, fileName);
|
||||
stopWatch.Stop();
|
||||
|
|
Loading…
Reference in a new issue