// Sonarr/NzbDrone.Core/Providers/Indexer/IndexerBase.cs

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.ServiceModel.Syndication;
using System.Text.RegularExpressions;
using Ninject;
using NLog;
using NzbDrone.Common;
using NzbDrone.Core.Model;
using NzbDrone.Core.Providers.Core;
namespace NzbDrone.Core.Providers.Indexer
{
    /// <summary>
    /// Base class for NZB indexers. Downloads RSS/search feeds, parses each
    /// feed item into an <see cref="EpisodeParseResult"/> and stamps the result
    /// with the indexer name and NZB download URL. Concrete indexers supply
    /// the feed URLs and any indexer-specific parsing.
    /// </summary>
    public abstract class IndexerBase
    {
        protected readonly Logger _logger;

        private readonly HttpProvider _httpProvider;
        protected readonly ConfigProvider _configProvider;

        // Matches any non-word character; used to turn a series title into a
        // '+'-separated search query (see GetQueryTitle).
        private static readonly Regex TitleSearchRegex = new Regex(@"[\W]", RegexOptions.IgnoreCase | RegexOptions.Compiled);

        [Inject]
        protected IndexerBase(HttpProvider httpProvider, ConfigProvider configProvider)
        {
            _httpProvider = httpProvider;
            _configProvider = configProvider;
            _logger = LogManager.GetLogger(GetType().ToString());
        }

        public IndexerBase()
        {
            // FIX: this constructor used to leave the readonly _logger field null,
            // so any Fetch* call on an instance created this way (e.g. by a
            // mocking framework) threw a NullReferenceException.
            _logger = LogManager.GetLogger(GetType().ToString());
        }

        /// <summary>
        /// Gets the name for the feed
        /// </summary>
        public abstract string Name { get; }

        /// <summary>
        /// Gets the source URL for the feed
        /// </summary>
        protected abstract string[] Urls { get; }

        /// <summary>
        /// Gets whether this indexer has been configured by the user; when
        /// false, all Fetch* calls short-circuit with an empty result.
        /// </summary>
        public abstract bool IsConfigured { get; }

        /// <summary>
        /// Gets the credential used when downloading feeds (null = anonymous).
        /// </summary>
        protected virtual NetworkCredential Credentials
        {
            get { return null; }
        }

        protected abstract IList<String> GetEpisodeSearchUrls(string seriesTitle, int seasonNumber, int episodeNumber);

        protected abstract IList<String> GetDailyEpisodeSearchUrls(string seriesTitle, DateTime date);

        protected abstract IList<String> GetSeasonSearchUrls(string seriesTitle, int seasonNumber);

        protected abstract IList<String> GetPartialSeasonSearchUrls(string seriesTitle, int seasonNumber, int episodeWildcard);

        /// <summary>
        /// This method can be overwritten to provide indexer specific info parsing
        /// </summary>
        /// <param name="item">RSS item that needs to be parsed</param>
        /// <param name="currentResult">Result of the built in parse function.</param>
        /// <returns>The (possibly amended) parse result; may be null.</returns>
        protected virtual EpisodeParseResult CustomParser(SyndicationItem item, EpisodeParseResult currentResult)
        {
            return currentResult;
        }

        /// <summary>
        /// Generates direct link to download an NZB
        /// </summary>
        /// <param name="item">RSS Feed item to generate the link for</param>
        /// <returns>Download link URL</returns>
        protected abstract string NzbDownloadUrl(SyndicationItem item);

        /// <summary>
        /// Fetches RSS feed and process each news item.
        /// </summary>
        public virtual IList<EpisodeParseResult> FetchRss()
        {
            _logger.Debug("Fetching feeds from " + Name);

            // FIX: dropped a redundant List allocation that was immediately
            // overwritten by the Fetch() result.
            var result = Fetch(Urls);

            _logger.Info("Finished processing feeds from " + Name);

            return result;
        }

        /// <summary>
        /// Searches the indexer for a full season of the given series.
        /// </summary>
        public virtual IList<EpisodeParseResult> FetchSeason(string seriesTitle, int seasonNumber)
        {
            _logger.Debug("Searching {0} for {1}-Season {2}", Name, seriesTitle, seasonNumber);

            var searchUrls = GetSeasonSearchUrls(GetQueryTitle(seriesTitle), seasonNumber);
            var result = Fetch(searchUrls);

            _logger.Info("Finished searching {0} for {1}-S{2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
            return result;
        }

        /// <summary>
        /// Searches the indexer for a partial season (episodes whose number
        /// starts with <paramref name="episodePrefix"/>).
        /// </summary>
        public virtual IList<EpisodeParseResult> FetchPartialSeason(string seriesTitle, int seasonNumber, int episodePrefix)
        {
            _logger.Debug("Searching {0} for {1}-Season {2}, Prefix: {3}", Name, seriesTitle, seasonNumber, episodePrefix);

            var searchUrls = GetPartialSeasonSearchUrls(GetQueryTitle(seriesTitle), seasonNumber, episodePrefix);
            var result = Fetch(searchUrls);

            _logger.Info("Finished searching {0} for {1}-S{2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
            return result;
        }

        /// <summary>
        /// Searches the indexer for a single numbered episode.
        /// </summary>
        public virtual IList<EpisodeParseResult> FetchEpisode(string seriesTitle, int seasonNumber, int episodeNumber)
        {
            _logger.Debug("Searching {0} for {1}-S{2:00}E{3:00}", Name, seriesTitle, seasonNumber, episodeNumber);

            var searchUrls = GetEpisodeSearchUrls(GetQueryTitle(seriesTitle), seasonNumber, episodeNumber);
            var result = Fetch(searchUrls);

            // FIX: season formatted as S{2:00} for consistency with the Debug line above.
            _logger.Info("Finished searching {0} for {1}-S{2:00}E{3:00}, Found {4}", Name, seriesTitle, seasonNumber, episodeNumber, result.Count);
            return result;
        }

        /// <summary>
        /// Searches the indexer for a daily (air-date based) episode.
        /// </summary>
        public virtual IList<EpisodeParseResult> FetchDailyEpisode(string seriesTitle, DateTime airDate)
        {
            _logger.Debug("Searching {0} for {1}-{2}", Name, seriesTitle, airDate.ToShortDateString());

            var searchUrls = GetDailyEpisodeSearchUrls(GetQueryTitle(seriesTitle), airDate);
            var result = Fetch(searchUrls);

            _logger.Info("Finished searching {0} for {1}-{2}, Found {3}", Name, seriesTitle, airDate.ToShortDateString(), result.Count);
            return result;
        }

        // Downloads each URL, parses every feed item, and collects the
        // successful parse results. Per-item and per-feed failures are logged
        // (best effort) and never abort the remaining URLs.
        private List<EpisodeParseResult> Fetch(IEnumerable<string> urls)
        {
            var result = new List<EpisodeParseResult>();

            if (!IsConfigured)
            {
                _logger.Warn("Indexer '{0}' isn't configured correctly. please reconfigure the indexer in settings page.", Name);
                return result;
            }

            foreach (var url in urls)
            {
                try
                {
                    _logger.Trace("Downloading RSS " + url);

                    // FIX: the reader (an XmlReader, as required by SyndicationFeed.Load)
                    // was never disposed, leaking the underlying response stream.
                    using (var reader = new SyndicationFeedXmlReader(_httpProvider.DownloadStream(url, Credentials)))
                    {
                        var feed = SyndicationFeed.Load(reader).Items;

                        foreach (var item in feed)
                        {
                            try
                            {
                                var parsedEpisode = ParseFeed(item);
                                if (parsedEpisode != null)
                                {
                                    parsedEpisode.NzbUrl = NzbDownloadUrl(item);
                                    parsedEpisode.Indexer = Name;
                                    parsedEpisode.OriginalString = item.Title.Text;
                                    result.Add(parsedEpisode);
                                }
                            }
                            catch (Exception itemEx)
                            {
                                // Attach diagnostics for the error report, then keep going.
                                itemEx.Data.Add("FeedUrl", url);
                                itemEx.Data.Add("Item", item.Title);
                                _logger.ErrorException("An error occurred while processing feed item", itemEx);
                            }
                        }
                    }
                }
                catch (WebException webException)
                {
                    // HTTP 503 is an expected, transient condition — warn instead of error.
                    if (webException.Message.Contains("503"))
                    {
                        // FIX: log message typo "unbelievable" -> "unavailable".
                        _logger.Warn("{0} server is currently unavailable. {1}", Name, webException.Message);
                    }
                    else
                    {
                        webException.Data.Add("FeedUrl", url);
                        _logger.ErrorException("An error occurred while processing feed: " + Name, webException);
                    }
                }
                catch (Exception feedEx)
                {
                    feedEx.Data.Add("FeedUrl", url);
                    _logger.ErrorException("An error occurred while processing feed: " + Name, feedEx);
                }
            }

            return result;
        }

        /// <summary>
        /// Parses the RSS feed item
        /// </summary>
        /// <param name="item">RSS feed item to parse</param>
        /// <returns>Detailed episode info; null when the title cannot be parsed.</returns>
        public EpisodeParseResult ParseFeed(SyndicationItem item)
        {
            var episodeParseResult = Parser.ParseTitle(item.Title.Text);
            return CustomParser(item, episodeParseResult);
        }

        /// <summary>
        /// This method can be overwritten to provide indexer specific title cleaning
        /// </summary>
        /// <param name="title">Title that needs to be cleaned</param>
        /// <returns>The title with non-word characters replaced by '+'.</returns>
        public virtual string GetQueryTitle(string title)
        {
            var cleanTitle = TitleSearchRegex.Replace(title, "+").Trim('+', ' ');

            // Collapse runs of '+' into a single one.
            // FIX: the previous pattern @"\+{1,100}" capped the collapsed run at
            // 100 characters; @"\++" handles a run of any length.
            cleanTitle = Regex.Replace(cleanTitle, @"\++", "+");

            return cleanTitle;
        }
    }
}