Commit after Merge.

Mark McDowall 2011-04-22 15:23:29 -07:00
parent fd32a04d45
commit 361d1f8d3b
17 changed files with 276 additions and 538 deletions

View File

@@ -14,6 +14,7 @@ namespace AutoMoq
{
public class AutoMoqer
{
internal readonly MockBehavior DefaultBehavior = MockBehavior.Default;
internal Type ResolveType;
private IUnityContainer container;
private IDictionary<Type, object> registeredMocks;
@@ -23,6 +24,13 @@ namespace AutoMoq
SetupAutoMoqer(new UnityContainer());
}
public AutoMoqer(MockBehavior defaultBehavior)
{
DefaultBehavior = defaultBehavior;
SetupAutoMoqer(new UnityContainer());
}
internal AutoMoqer(IUnityContainer container)
{
SetupAutoMoqer(container);
@@ -37,7 +45,12 @@ namespace AutoMoq
return result;
}
public virtual Mock<T> GetMock<T>(MockBehavior behavior = MockBehavior.Default) where T : class
public virtual Mock<T> GetMock<T>() where T : class
{
return GetMock<T>(DefaultBehavior);
}
public virtual Mock<T> GetMock<T>(MockBehavior behavior) where T : class
{
ResolveType = null;
var type = GetTheMockType<T>();
@@ -103,7 +116,8 @@ namespace AutoMoq
foreach (var registeredMock in registeredMocks)
{
var mock = registeredMock.Value as Mock;
mock.VerifyAll();
if (mock != null)
mock.VerifyAll();
}
}
@@ -112,10 +126,10 @@ namespace AutoMoq
private void SetupAutoMoqer(IUnityContainer container)
{
this.container = container;
registeredMocks = new Dictionary<Type, object>();
AddTheAutoMockingContainerExtensionToTheContainer(container);
container.RegisterInstance(this);
registeredMocks = new Dictionary<Type, object>();
AddTheAutoMockingContainerExtensionToTheContainer(container);
}
private static void AddTheAutoMockingContainerExtensionToTheContainer(IUnityContainer container)
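Taken together, these AutoMoq changes let a fixture pick one default MockBehavior at construction time and have every parameterless GetMock<T>() call inherit it (the IndexerProviderTest later in this commit does exactly that with MockBehavior.Strict). A minimal usage sketch follows; IEmailSender, ReportService and the test class are hypothetical stand-ins, not types from this commit.

using AutoMoq;
using Moq;

public interface IEmailSender
{
    void Send(string message);
}

public class ReportService
{
    private readonly IEmailSender _sender;

    public ReportService(IEmailSender sender)
    {
        _sender = sender;
    }

    public void Publish(string report)
    {
        _sender.Send(report);
    }
}

public class ReportServiceSketch
{
    public void publish_sends_exactly_one_email()
    {
        // Every mock handed out by this AutoMoqer now defaults to strict behavior.
        var mocker = new AutoMoqer(MockBehavior.Strict);

        mocker.GetMock<IEmailSender>()
              .Setup(s => s.Send("weekly report"));

        mocker.Resolve<ReportService>().Publish("weekly report");

        mocker.GetMock<IEmailSender>()
              .Verify(s => s.Send("weekly report"), Times.Once());
    }
}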

View File

@@ -16,7 +16,8 @@ namespace AutoMoq.Unity
public AutoMockingBuilderStrategy(IEnumerable<Type> registeredTypes, IUnityContainer container)
{
mockFactory = new MockFactory(MockBehavior.Loose);
var autoMoqer = container.Resolve<AutoMoqer>();
mockFactory = new MockFactory(autoMoqer.DefaultBehavior);
this.registeredTypes = registeredTypes;
this.container = container;
}
@@ -62,19 +63,19 @@ namespace AutoMoq.Unity
private Mock InvokeTheMockCreationMethod(MethodInfo createMethod)
{
return (Mock) createMethod.Invoke(mockFactory, new object[] {new List<object>().ToArray()});
return (Mock)createMethod.Invoke(mockFactory, new object[] { new List<object>().ToArray() });
}
private MethodInfo GenerateAnInterfaceMockCreationMethod(Type type)
{
var createMethodWithNoParameters = mockFactory.GetType().GetMethod("Create", EmptyArgumentList());
return createMethodWithNoParameters.MakeGenericMethod(new[] {type});
return createMethodWithNoParameters.MakeGenericMethod(new[] { type });
}
private static Type[] EmptyArgumentList()
{
return new[] {typeof (object[])};
return new[] { typeof(object[]) };
}
#endregion
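The constructor change above only works because SetupAutoMoqer in the previous file now calls container.RegisterInstance(this): resolving AutoMoqer from the same container hands back the exact instance the test created, default behavior included. The Unity feature being leaned on is plain instance registration; a generic sketch, assuming the Microsoft.Practices.Unity namespace of the Unity build this code base targets (the Settings type is a placeholder).

using System;
using Microsoft.Practices.Unity;   // assumed namespace for the Unity version in use

public class Settings
{
    public int Level = 3;
}

public static class RegisterInstanceSketch
{
    public static void Main()
    {
        var container = new UnityContainer();
        var settings = new Settings();

        // RegisterInstance pins this exact object in the container...
        container.RegisterInstance(settings);

        // ...so later resolutions return the same object instead of building a new one.
        var resolved = container.Resolve<Settings>();
        Console.WriteLine(ReferenceEquals(settings, resolved));   // True
    }
}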

View File

@@ -116,6 +116,35 @@ namespace NzbDrone.Core.Test
Assert.AreEqual(excpected, result);
}
[Test]
public void get_episode_by_parse_result()
{
var mocker = new AutoMoqer();
var repo = MockLib.GetEmptyRepository();
var fakeEpisodes = MockLib.GetFakeEpisodes(2);
repo.AddMany(fakeEpisodes);
mocker.SetConstant(repo);
var targetEpisode = fakeEpisodes[4];
var parseResult1 = new EpisodeParseResult
{
SeriesId = targetEpisode.SeriesId,
SeasonNumber = targetEpisode.SeasonNumber,
Episodes = new List<int> { targetEpisode.EpisodeNumber },
Quality = QualityTypes.DVD
};
var result = mocker.Resolve<EpisodeProvider>().GetEpisodeByParseResult(parseResult1);
Assert.Count(1, result);
Assert.AreEqual(targetEpisode.EpisodeId, result.First().EpisodeId);
Assert.AreEqual(targetEpisode.EpisodeNumber, result.First().EpisodeNumber);
Assert.AreEqual(targetEpisode.SeasonNumber, result.First().SeasonNumber);
Assert.AreEqual(targetEpisode.SeriesId, result.First().SeriesId);
}
[Test]
public void Missing_episode_should_be_added()
{

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AutoMoq;
using MbUnit.Framework;
using Moq;
using NzbDrone.Core.Providers;
@@ -63,6 +64,30 @@ namespace NzbDrone.Core.Test
Assert.AreEqual(result.Count(), 1);
}
[Test]
public void add_item()
{
var mocker = new AutoMoqer();
var repo = MockLib.GetEmptyRepository();
mocker.SetConstant(repo);
var episodes = MockLib.GetFakeEpisodes(1);
repo.AddMany(episodes);
var episode = episodes[5];
var history = new History
{
Date = DateTime.Now,
EpisodeId = episode.EpisodeId,
NzbTitle = "my title"
};
mocker.Resolve<HistoryProvider>().Add(history);
}
[Test]
[Ignore]
public void Exists_True()

View File

@@ -53,11 +53,24 @@ namespace NzbDrone.Core.Test
//Assert
Assert.Count(1, indexers);
}
[Test]
public void unmapped_series_shouldnt_call_any_providers()
{
var mocker = new AutoMoqer(MockBehavior.Strict);
mocker.GetMock<SeriesProvider>()
.Setup(c => c.FindSeries(It.IsAny<String>()))
.Returns<Series>(null);
var indexer = mocker.Resolve<MockIndexerProvider>();
indexer.ProcessItem(new SyndicationItem { Title = new TextSyndicationContent("Adventure.Inc.S01E18.DVDRip.XviD-OSiTV") });
}
}
public class MockIndexerProvider : IndexerProviderBase
{
public MockIndexerProvider(SeriesProvider seriesProvider, SeasonProvider seasonProvider, EpisodeProvider episodeProvider, ConfigProvider configProvider, HttpProvider httpProvider, IndexerProvider indexerProvider, HistoryProvider historyProvider) : base(seriesProvider, seasonProvider, episodeProvider, configProvider, httpProvider, indexerProvider, historyProvider)
public MockIndexerProvider(SeriesProvider seriesProvider, SeasonProvider seasonProvider, EpisodeProvider episodeProvider, ConfigProvider configProvider, HttpProvider httpProvider, IndexerProvider indexerProvider, HistoryProvider historyProvider)
: base(seriesProvider, seasonProvider, episodeProvider, configProvider, httpProvider, indexerProvider, historyProvider)
{
}

View File

@@ -2,9 +2,11 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using FizzWare.NBuilder;
using Moq;
using NzbDrone.Core.Instrumentation;
using NzbDrone.Core.Providers.Core;
using NzbDrone.Core.Repository;
using SubSonic.DataProviders;
using SubSonic.Repository;
@@ -17,7 +19,7 @@ namespace NzbDrone.Core.Test
{
public static string[] StandardSeries
{
get { return new[] {"c:\\tv\\the simpsons", "c:\\tv\\family guy", "c:\\tv\\southpark", "c:\\tv\\24"}; }
get { return new[] { "c:\\tv\\the simpsons", "c:\\tv\\family guy", "c:\\tv\\southpark", "c:\\tv\\24" }; }
}
public static ConfigProvider StandardConfig
@@ -71,5 +73,23 @@ namespace NzbDrone.Core.Test
return mock.Object;
}
public static Series GetFakeSeries(int id, string title)
{
return Builder<Series>.CreateNew()
.With(c => c.SeriesId = id)
.With(c => c.Title = title)
.With(c => c.CleanTitle = Parser.NormalizeTitle(title))
.Build();
}
public static IList<Episode> GetFakeEpisodes(int seriesId)
{
var epNumber = new SequentialGenerator<int>();
return Builder<Episode>.CreateListOfSize(10)
.WhereAll().Have(c => c.SeriesId = seriesId)
.WhereAll().Have(c => c.EpisodeNumber = epNumber.Generate())
.Build();
}
}
}
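One detail worth calling out: GetFakeEpisodes takes a series id, not a count, and always builds ten episodes whose EpisodeNumber values come from the SequentialGenerator. That is why tests earlier in this commit can safely index fakeEpisodes[4] after calling GetFakeEpisodes(2). A small sketch of a consumer; the fixture and test name are hypothetical, and the assertions reuse the MbUnit helpers already seen above.

using MbUnit.Framework;
using NzbDrone.Core.Test;   // where MockLib lives

[TestFixture]
public class MockLibSketch
{
    [Test]
    public void fake_episodes_have_the_expected_shape()
    {
        var episodes = MockLib.GetFakeEpisodes(7);   // 7 is the series id, not the list size

        Assert.Count(10, episodes);                  // the builder always creates ten rows
        foreach (var episode in episodes)
            Assert.AreEqual(7, episode.SeriesId);    // every row is tied to the requested series
    }
}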

View File

@@ -12,35 +12,45 @@ namespace NzbDrone.Core.Test
/*Fucked-up hall of shame,
* WWE.Wrestlemania.27.PPV.HDTV.XviD-KYR
* The.Kennedys.Part.2.DSR.XviD-SYS
*
* Unreported.World.Chinas.Lost.Sons.WS.PDTV.XviD-FTP
*/
[Test]
[Timeout(1)]
[Row("Sonny.With.a.Chance.S02E15", 2, 15)]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD", 3, 1)]
[Row("Two.and.a.Half.Me.103.720p.HDTV.X264-DIMENSION", 1, 3)]
[Row("Two.and.a.Half.Me.113.720p.HDTV.X264-DIMENSION", 1, 13)]
[Row("Two.and.a.Half.Me.1013.720p.HDTV.X264-DIMENSION", 10, 13)]
[Row("Chuck.4x05.HDTV.XviD-LOL", 4, 5)]
[Row("The.Girls.Next.Door.S03E06.DVDRip.XviD-WiDE", 3, 6)]
[Row("Degrassi.S10E27.WS.DSR.XviD-2HD", 10, 27)]
[Row("Sonny.With.a.Chance.S02E15", "Sonny.With.a.Chance", 2, 15)]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD", "WEEDS", 3, 1)]
[Row("Two.and.a.Half.Me.103.720p.HDTV.X264-DIMENSION", "Two.and.a.Half.Me", 1, 3)]
[Row("Two.and.a.Half.Me.113.720p.HDTV.X264-DIMENSION", "Two.and.a.Half.Me", 1, 13)]
[Row("Two.and.a.Half.Me.1013.720p.HDTV.X264-DIMENSION", "Two.and.a.Half.Me", 10, 13)]
[Row("Chuck.4x05.HDTV.XviD-LOL", "Chuck", 4, 5)]
[Row("The.Girls.Next.Door.S03E06.DVDRip.XviD-WiDE", "The.Girls.Next.Door", 3, 6)]
[Row("Degrassi.S10E27.WS.DSR.XviD-2HD", "Degrassi", 10, 27)]
[Row("Parenthood.2010.S02E14.HDTV.XviD-LOL", "Parenthood", 2, 14)]
[Row("Hawaii Five 0 S01E19 720p WEB DL DD5 1 H 264 NT", "Hawaii Five", 1, 19)]
[Row("The Event S01E14 A Message Back 720p WEB DL DD5 1 H264 SURFER", "The Event", 1, 14)]
[Row("Adam Hills In Gordon St Tonight S01E07 WS PDTV XviD FUtV", "Adam Hills In Gordon St Tonight", 1, 7)]
[Row("Adam Hills In Gordon St Tonight S01E07 WS PDTV XviD FUtV", "Adam Hills In Gordon St Tonight", 1, 7)]
[Row("Adventure.Inc.S03E19.DVDRip.XviD-OSiTV", "Adventure.Inc", 3, 19)]
//[Row("The.Kennedys.Part.2.DSR.XviD-SYS", 1, 2)]
public void episode_parse(string postTitle, string title, int season, int episode)
{
var result = Parser.ParseEpisodeInfo(postTitle);
Assert.AreEqual(season, result.SeasonNumber);
Assert.AreEqual(episode, result.Episodes[0]);
Assert.AreEqual(Parser.NormalizeTitle(title), result.CleanTitle);
}
[Test]
[Row(@"z:\tv shows\battlestar galactica (2003)\Season 3\S03E05 - Collaborators.mkv", 3, 5)]
[Row(@"z:\tv shows\modern marvels\Season 16\S16E03 - The Potato.mkv", 16, 3)]
[Row(@"z:\tv shows\robot chicken\Specials\S00E16 - Dear Consumer - SD TV.avi", 0, 16)]
[Row(@"Parenthood.2010.S02E14.HDTV.XviD-LOL", 2, 14)]
[Row(@"Hawaii Five 0 S01E19 720p WEB DL DD5 1 H 264 NT", 1, 19)]
[Row(@"The Event S01E14 A Message Back 720p WEB DL DD5 1 H264 SURFER", 1, 14)]
[Row(@"Adam Hills In Gordon St Tonight S01E07 WS PDTV XviD FUtV", 1, 7)]
[Row(@"Adam Hills In Gordon St Tonight S01E07 WS PDTV XviD FUtV", 1, 7)]
//[Row("The.Kennedys.Part.2.DSR.XviD-SYS", 1, 2)]
public void episode_parse(string path, int season, int episode)
public void file_path_parse(string path, int season, int episode)
{
var result = Parser.ParseEpisodeInfo(path);
Assert.AreEqual(season, result.SeasonNumber);
Assert.AreEqual(episode, result.Episodes[0]);
}
[Test]
[Timeout(1)]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD", QualityTypes.BDRip)]
@@ -62,37 +72,40 @@ namespace NzbDrone.Core.Test
[Row("Chuck - S01E03 - Come Fly With Me - 1080p BluRay.mkv", QualityTypes.Bluray1080)]
[Row("Chuck - S11E06 - D-Yikes! - 720p WEB-DL.mkv", QualityTypes.WEBDL)]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD.avi", QualityTypes.BDRip)]
public void quality_parse(string path, object quality)
public void quality_parse(string postTitle, object quality)
{
var result = Parser.ParseEpisodeInfo(path).Quality;
var result = Parser.ParseEpisodeInfo(postTitle).Quality;
Assert.AreEqual(quality, result);
}
[Test]
[Timeout(1)]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD", 3, new[] { 1, 2, 3, 4, 5, 6 })]
[Row("Two.and.a.Half.Men.103.104.720p.HDTV.X264-DIMENSION", 1, new[] { 3, 4 })]
[Row("Weeds.S03E01.S03E02.720p.HDTV.X264-DIMENSION", 3, new[] { 1, 2 })]
[Row("The Borgias S01e01 e02 ShoHD On Demand 1080i DD5 1 ALANiS", 1, new[] { 1, 2 })]
[Row("Big Time Rush 1x01 to 10 480i DD2 0 Sianto", 1, new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })]
[Row("White.Collar.2x04.2x05.720p.BluRay-FUTV", 2, new[] { 4, 5 })]
[Row("WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD", "WEEDS", 3, new[] { 1, 2, 3, 4, 5, 6 })]
[Row("Two.and.a.Half.Men.103.104.720p.HDTV.X264-DIMENSION", "Two.and.a.Half.Men", 1, new[] { 3, 4 })]
[Row("Weeds.S03E01.S03E02.720p.HDTV.X264-DIMENSION", "Weeds", 3, new[] { 1, 2 })]
[Row("The Borgias S01e01 e02 ShoHD On Demand 1080i DD5 1 ALANiS", "The Borgias", 1, new[] { 1, 2 })]
[Row("Big Time Rush 1x01 to 10 480i DD2 0 Sianto", "Big Time Rush", 1, new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })]
[Row("White.Collar.2x04.2x05.720p.BluRay-FUTV", "White.Collar", 2, new[] { 4, 5 })]
//[Row("The.Kennedys.Part.1.and.Part.2.DSR.XviD-SYS", 1, new[] { 1, 2 })]
public void episode_multipart_parse(string path, int season, int[] episodes)
public void episode_multipart_parse(string postTitle, string title, int season, int[] episodes)
{
var result = Parser.ParseEpisodeInfo(path);
var result = Parser.ParseEpisodeInfo(postTitle);
Assert.AreEqual(season, result.SeasonNumber);
Assert.Count(episodes.Length, result.Episodes);
Assert.AreElementsEqualIgnoringOrder(episodes, result.Episodes);
Assert.AreEqual(Parser.NormalizeTitle(title), result.CleanTitle);
}
[Test]
[Row("Conan 2011 04 18 Emma Roberts HDTV XviD BFF", 2011, 04, 18)]
[Row("The Tonight Show With Jay Leno 2011 04 15 1080i HDTV DD5 1 MPEG2 TrollHD", 2011, 04, 15)]
public void episode_daily_parse(string path, int year, int month, int day)
[Row("Conan 2011 04 18 Emma Roberts HDTV XviD BFF", "Conan", 2011, 04, 18)]
[Row("The Tonight Show With Jay Leno 2011 04 15 1080i HDTV DD5 1 MPEG2 TrollHD", "The Tonight Show With Jay Leno", 2011, 04, 15)]
[Row("The.Daily.Show.2010.10.11.Johnny.Knoxville.iTouch-MW", "The.Daily.Show", 2010, 10, 11)]
[Row("The Daily Show - 2011-04-12 - Gov. Deval Patrick", "The.Daily.Show", 2011, 04, 12)]
public void episode_daily_parse(string postTitle, string title, int year, int month, int day)
{
var result = Parser.ParseEpisodeInfo(path);
var result = Parser.ParseEpisodeInfo(postTitle);
var airDate = new DateTime(year, month, day);
Assert.AreEqual(Parser.NormalizeTitle(title), result.CleanTitle);
Assert.AreEqual(airDate, result.AirDate);
}
@@ -107,5 +120,66 @@ namespace NzbDrone.Core.Test
var result = Parser.NormalizePath(dirty);
Assert.AreEqual(clean, result);
}
[Test]
[Row("CaPitAl", "capital")]
[Row("peri.od", "period")]
[Row("this.^&%^**$%@#$!That", "thisthat")]
public void Normalize_Title(string dirty, string clean)
{
var result = Parser.NormalizeTitle(dirty);
Assert.AreEqual(clean, result);
}
[Test]
[Row("the")]
[Row("And")]
[Row("Or")]
public void Normalize_removed_common_words(string word)
{
var dirtyFormat = new[]
{
"word.{0}.word",
"word {0} word",
"word-{0}-word",
"{0}.word.word",
"{0}-word-word",
"{0} word word",
"word.word.{0}",
"word-word-{0}",
"word-word {0}",
};
foreach (var s in dirtyFormat)
{
var dirty = String.Format(s, word);
Assert.AreEqual("wordword", Parser.NormalizeTitle(dirty));
}
}
[Test]
[Row("the")]
[Row("And")]
[Row("Or")]
public void Normalize_not_removed_common_words_in_the_middle(string word)
{
var dirtyFormat = new[]
{
"word.{0}word",
"word {0}word",
"word-{0}word",
"word{0}.word",
"word{0}-word",
"word{0}-word",
};
foreach (var s in dirtyFormat)
{
var dirty = String.Format(s, word);
Assert.AreEqual("word" + word.ToLower() + "word", Parser.NormalizeTitle(dirty));
}
}
}
}

View File

@@ -1,158 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AutoMoq;
using FizzWare.NBuilder;
using MbUnit.Framework;
using Moq;
using NzbDrone.Core.Providers;
using NzbDrone.Core.Repository;
using NzbDrone.Core.Repository.Quality;
using SubSonic.Repository;
using TvdbLib.Data;
// ReSharper disable InconsistentNaming
namespace NzbDrone.Core.Test
{
[TestFixture]
// ReSharper disable InconsistentNaming
public class SeriesProviderTest
{
[Test]
public void Map_path_to_series()
{
//Arrange
var fakeSeries = Builder<TvdbSeries>.CreateNew()
.With(f => f.SeriesName = "The Simpsons")
.Build();
var fakeSearch = Builder<TvdbSearchResult>.CreateNew()
.With(s => s.Id = fakeSeries.Id)
.With(s => s.SeriesName = fakeSeries.SeriesName)
.Build();
var mocker = new AutoMoqer();
mocker.GetMock<IRepository>()
.Setup(f => f.Exists<Series>(c => c.SeriesId == It.IsAny<int>()))
.Returns(false);
mocker.GetMock<TvDbProvider>()
.Setup(f => f.GetSeries(It.IsAny<String>()))
.Returns(fakeSearch);
mocker.GetMock<TvDbProvider>()
.Setup(f => f.GetSeries(fakeSeries.Id, false))
.Returns(fakeSeries)
.Verifiable();
//Act
var mappedSeries = mocker.Resolve<SeriesProvider>().MapPathToSeries(@"D:\TV Shows\The Simpsons");
//Assert
mocker.GetMock<TvDbProvider>().VerifyAll();
Assert.AreEqual(fakeSeries, mappedSeries);
}
[Test]
public void Add_new_series()
{
var mocker = new AutoMoqer();
mocker.SetConstant(MockLib.GetEmptyRepository());
string path = "C:\\Test\\";
int tvDbId = 1234;
int qualityProfileId = 2;
//Act
var seriesProvider = mocker.Resolve<SeriesProvider>();
seriesProvider.AddSeries(path, tvDbId, qualityProfileId);
//Assert
var series = seriesProvider.GetAllSeries();
Assert.IsNotEmpty(series);
Assert.Count(1, series);
Assert.AreEqual(path, series.First().Path);
Assert.AreEqual(tvDbId, series.First().SeriesId);
Assert.AreEqual(qualityProfileId, series.First().QualityProfileId);
}
[Test]
[Row(new object[] {"That's Life - 2x03 -The Devil and Miss DeLucca", "That's Life"})]
[Row(new object[] {"Van.Duin.Op.Zn.Best.S02E05.DUTCH.WS.PDTV.XViD-DiFFERENT", "Van Duin Op Zn Best"})]
[Row(new object[] {"Dollhouse.S02E06.The.Left.Hand.720p.BluRay.x264-SiNNERS", "Dollhouse"})]
[Row(new object[] {"Heroes.S02.COMPLETE.German.PROPER.DVDRip.XviD-Prim3time", "Heroes"})]
[Ignore("should be updated to validate agains a remote episode instance rather than just the title string")]
public void Test_Parse_Success(string postTitle, string title)
{
var result = Parser.ParseEpisodeInfo(postTitle);
//Assert.AreEqual(title, result, postTitle);
}
[Test]
public void Test_is_monitored()
{
var mocker = new AutoMoqer();
mocker.SetConstant(MockLib.GetEmptyRepository());
mocker.Resolve<IRepository>().Add(Builder<Series>.CreateNew()
.With(c => c.Monitored = true)
.With(c => c.SeriesId = 12)
.Build());
mocker.Resolve<IRepository>().Add(Builder<Series>.CreateNew()
.With(c => c.Monitored = false)
.With(c => c.SeriesId = 11)
.Build());
//Act, Assert
var provider = mocker.Resolve<SeriesProvider>();
Assert.IsTrue(provider.IsMonitored(12));
Assert.IsFalse(provider.IsMonitored(11));
Assert.IsFalse(provider.IsMonitored(1));
}
[Test]
[Row(12, QualityTypes.TV, true)]
[Row(12, QualityTypes.Unknown, false)]
[Row(12, QualityTypes.Bluray1080, false)]
[Row(12, QualityTypes.Bluray720, false)]
[Row(12, QualityTypes.HDTV, false)]
[Row(12, QualityTypes.WEBDL, false)]
public void QualityWanted(int seriesId, QualityTypes qualityTypes, Boolean result)
{
var quality = Builder<QualityProfile>.CreateNew()
.With(q => q.Allowed = new List<QualityTypes> {QualityTypes.BDRip, QualityTypes.DVD, QualityTypes.TV})
.With(q => q.Cutoff = QualityTypes.DVD)
.Build();
var series = Builder<Series>.CreateNew()
.With(c => c.SeriesId = 12)
.With(c => c.QualityProfileId = quality.QualityProfileId)
.Build();
var mocker = new AutoMoqer();
var emptyRepository = MockLib.GetEmptyRepository();
mocker.SetConstant(emptyRepository);
mocker.GetMock<QualityProvider>()
.Setup(c => c.Find(quality.QualityProfileId)).Returns(quality);
emptyRepository.Add(series);
//Act
var needed = mocker.Resolve<SeriesProvider>().QualityWanted(seriesId, qualityTypes);
Assert.AreEqual(result, needed);
}
}
}

View File

@@ -6,7 +6,7 @@ namespace NzbDrone.Core.Model
{
public class EpisodeParseResult
{
internal string SeriesTitle { get; set; }
internal string CleanTitle { get; set; }
public int SeriesId { get; set; }
internal int SeasonNumber { get; set; }
@@ -24,9 +24,9 @@ namespace NzbDrone.Core.Model
public override string ToString()
{
if (Episodes == null)
return string.Format("Series:{0} Air Date:{1}", SeriesTitle, AirDate.Date);
return string.Format("Series:{0} Air Date:{1}", CleanTitle, AirDate.Date);
return string.Format("Series:{0} Season:{1} Episode:{2}", SeriesTitle, SeasonNumber,
return string.Format("Series:{0} Season:{1} Episode:{2}", CleanTitle, SeasonNumber,
String.Join(",", Episodes));
}

View File

@@ -76,7 +76,7 @@ namespace NzbDrone.Core
parsedEpisode = new EpisodeParseResult
{
Proper = title.ToLower().Contains("proper"),
SeriesTitle = seriesName,
CleanTitle = seriesName,
SeasonNumber = season,
Year = year,
Episodes = new List<int>()
@@ -106,7 +106,7 @@ namespace NzbDrone.Core
parsedEpisode = new EpisodeParseResult
{
Proper = title.ToLower().Contains("proper"),
SeriesTitle = seriesName,
CleanTitle = seriesName,
Year = year,
AirDate = new DateTime(airyear, airmonth, airday)
};

View File

@@ -1,291 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using NLog;
using NzbDrone.Core.Model;
using NzbDrone.Core.Repository;
using SubSonic.Repository;
namespace NzbDrone.Core.Providers
{
public class EpisodeProvider
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
private readonly QualityProvider _quality;
private readonly SeasonProvider _seasons;
private readonly SeriesProvider _series;
private readonly IRepository _sonicRepo;
private readonly TvDbProvider _tvDb;
public EpisodeProvider(IRepository sonicRepo, SeriesProvider seriesProvider,
SeasonProvider seasonProvider, TvDbProvider tvDbProvider,
QualityProvider quality)
{
_sonicRepo = sonicRepo;
_series = seriesProvider;
_tvDb = tvDbProvider;
_seasons = seasonProvider;
_quality = quality;
}
public EpisodeProvider()
{
}
public virtual Episode GetEpisode(long id)
{
return _sonicRepo.Single<Episode>(id);
}
public virtual Episode GetEpisode(int seriesId, int seasonNumber, int episodeNumber)
{
return
_sonicRepo.Single<Episode>(
c => c.SeriesId == seriesId && c.SeasonNumber == seasonNumber && c.EpisodeNumber == episodeNumber);
}
public virtual IList<Episode> GetEpisodeBySeries(long seriesId)
{
return _sonicRepo.Find<Episode>(e => e.SeriesId == seriesId);
}
public virtual IList<Episode> GetEpisodeBySeason(long seasonId)
{
return _sonicRepo.Find<Episode>(e => e.SeasonId == seasonId);
}
public virtual IList<Episode> GetEpisodeByParseResult(EpisodeParseResult parseResult)
{
return _sonicRepo.Find<Episode>(e =>
e.SeriesId == parseResult.SeriesId &&
e.SeasonNumber == parseResult.SeasonNumber &&
parseResult.Episodes.Contains(e.EpisodeNumber));
}
public virtual String GetSabTitle(EpisodeParseResult parseResult)
{
//Show Name - 1x01-1x02 - Episode Name
//Show Name - 1x01 - Episode Name
var episodeString = new List<String>();
foreach (var episode in parseResult.Episodes)
{
episodeString.Add(String.Format("{0}x{1}", parseResult.SeasonNumber, episode));
}
var epNumberString = String.Join("-", episodeString);
var series = _series.GetSeries(parseResult.SeriesId);
var folderName = new DirectoryInfo(series.Path).Name;
var result = String.Format("{0} - {1} - {2} {3}", folderName, epNumberString, parseResult.EpisodeTitle, parseResult.Quality);
if (parseResult.Proper)
{
result += " [Proper]";
}
return result;
}
/// <summary>
/// Comprehensive check on whether or not this episode is needed.
/// </summary>
/// <param name = "parsedReport">Episode that needs to be checked</param>
/// <returns></returns>
public virtual bool IsNeeded(EpisodeParseResult parsedReport)
{
foreach (var episode in parsedReport.Episodes)
{
var episodeInfo = GetEpisode(parsedReport.SeriesId, parsedReport.SeasonNumber, episode);
if (episodeInfo == null)
{
//Todo: How do we want to handle this really? Episode could be released before information is on TheTvDB
//(Parks and Rec did this a lot in the first season, from experience)
//Keivan: Should automatically add the episode to db with minimal information. then update the description/title when available.
episodeInfo = new Episode
{
SeriesId = parsedReport.SeriesId,
AirDate = DateTime.Now.Date,
EpisodeNumber = episode,
SeasonNumber = parsedReport.SeasonNumber,
Title = String.Empty,
Overview = String.Empty,
Language = "en"
};
_sonicRepo.Add(episodeInfo);
}
var file = episodeInfo.EpisodeFile;
if (file != null)
{
//If not null we need to see if this episode has the quality as the download (or if it is better)
if (file.Quality == parsedReport.Quality && file.Proper) continue;
//There will never be a time when the episode quality is less than what we have and we want it... ever.... I think.
if (file.Quality > parsedReport.Quality) continue;
//Now we need to handle upgrades and actually pay attention to the Cutoff Value
if (file.Quality < parsedReport.Quality)
{
var quality = _quality.Find(episodeInfo.Series.QualityProfileId);
if (quality.Cutoff <= file.Quality && file.Proper) continue;
}
}
return true; //If we get to this point and the file has not yet been rejected then accept it
}
return false;
}
public virtual void RefreshEpisodeInfo(int seriesId)
{
Logger.Info("Starting episode info refresh for series:{0}", seriesId);
int successCount = 0;
int failCount = 0;
var targetSeries = _tvDb.GetSeries(seriesId, true);
var updateList = new List<Episode>();
var newList = new List<Episode>();
Logger.Debug("Updating season info for series:{0}", targetSeries.SeriesName);
targetSeries.Episodes.Select(e => new { e.SeasonId, e.SeasonNumber })
.Distinct().ToList()
.ForEach(s => _seasons.EnsureSeason(seriesId, s.SeasonId, s.SeasonNumber));
foreach (var episode in targetSeries.Episodes)
{
try
{
//DateTime throws an error in SQLServer per message below:
//SqlDateTime overflow. Must be between 1/1/1753 12:00:00 AM and 12/31/9999 11:59:59 PM.
//So lets hack it so it works for SQLServer (as well as SQLite), perhaps we can find a better solution
//Todo: Fix this hack
if (episode.FirstAired < new DateTime(1753, 1, 1))
episode.FirstAired = new DateTime(1753, 1, 1);
Logger.Trace("Updating info for [{0}] - S{1}E{2}", targetSeries.SeriesName, episode.SeasonNumber, episode.EpisodeNumber);
var newEpisode = new Episode
{
AirDate = episode.FirstAired,
EpisodeId = episode.Id,
EpisodeNumber = episode.EpisodeNumber,
Language = episode.Language.Abbriviation,
Overview = episode.Overview,
SeasonId = episode.SeasonId,
SeasonNumber = episode.SeasonNumber,
SeriesId = seriesId,
Title = episode.EpisodeName
};
if (_sonicRepo.Exists<Episode>(e => e.EpisodeId == newEpisode.EpisodeId))
{
updateList.Add(newEpisode);
}
else
{
newList.Add(newEpisode);
}
successCount++;
}
catch (Exception e)
{
Logger.FatalException(
String.Format("An error has occurred while updating episode info for series {0}", seriesId), e);
failCount++;
}
}
_sonicRepo.AddMany(newList);
_sonicRepo.UpdateMany(updateList);
Logger.Debug("Finished episode refresh for series:{0}. Successful:{1} - Failed:{2} ",
targetSeries.SeriesName, successCount, failCount);
}
public virtual void RefreshEpisodeInfo(Season season)
{
Logger.Info("Starting episode info refresh for season {0} of series:{1}", season.SeasonNumber,
season.SeriesId);
int successCount = 0;
int failCount = 0;
var targetSeries = _tvDb.GetSeries(season.SeriesId, true);
var updateList = new List<Episode>();
var newList = new List<Episode>();
foreach (var episode in targetSeries.Episodes.Where(e => e.SeasonId == season.SeasonId))
{
try
{
//DateTime throws an error in SQLServer per message below:
//SqlDateTime overflow. Must be between 1/1/1753 12:00:00 AM and 12/31/9999 11:59:59 PM.
//So lets hack it so it works for SQLServer (as well as SQLite), perhaps we can find a better solution
//Todo: Fix this hack
if (episode.FirstAired < new DateTime(1753, 1, 1))
episode.FirstAired = new DateTime(1753, 1, 1);
Logger.Trace("Updating info for series:{0} - episode:{1}", targetSeries.SeriesName,
episode.EpisodeNumber);
var newEpisode = new Episode
{
AirDate = episode.FirstAired,
EpisodeId = episode.Id,
EpisodeNumber = episode.EpisodeNumber,
Language = episode.Language.Abbriviation,
Overview = episode.Overview,
SeasonId = episode.SeasonId,
SeasonNumber = episode.SeasonNumber,
SeriesId = season.SeriesId,
Title = episode.EpisodeName
};
//TODO: Replace this db check with a local check. Should make things even faster
if (_sonicRepo.Exists<Episode>(e => e.EpisodeId == newEpisode.EpisodeId))
{
updateList.Add(newEpisode);
}
else
{
newList.Add(newEpisode);
}
successCount++;
}
catch (Exception e)
{
Logger.FatalException(
String.Format("An error has occurred while updating episode info for season {0} of series {1}",
season.SeasonNumber, season.SeriesId), e);
failCount++;
}
}
_sonicRepo.AddMany(newList);
_sonicRepo.UpdateMany(updateList);
Logger.Debug("Finished episode refresh for series:{0}. Successful:{1} - Failed:{2} ",
targetSeries.SeriesName, successCount, failCount);
}
public virtual void DeleteEpisode(int episodeId)
{
_sonicRepo.Delete<Episode>(episodeId);
}
public virtual void UpdateEpisode(Episode episode)
{
_sonicRepo.Update(episode);
}
}
}

View File

@@ -40,10 +40,10 @@ namespace NzbDrone.Core.Providers
Logger.Info("History has been trimmed, items older than 30 days have been removed");
}
public virtual void Insert(History item)
public virtual void Add(History item)
{
_sonicRepo.Add(item);
Logger.Debug("Item added to history: {0} - {1}x{2:00}", item.Episode.Series.Title, item.Episode.SeasonNumber, item.Episode.EpisodeNumber);
Logger.Debug("Item added to history: {0}", item.NzbTitle);
}
public virtual bool Exists(int episodeId, QualityTypes quality, bool proper)
@@ -52,7 +52,7 @@ namespace NzbDrone.Core.Providers
if (_sonicRepo.Exists<History>(h => h.EpisodeId == episodeId && h.Quality == quality && h.IsProper == proper))
return true;
Logger.Debug("Episode not in History. ID:{0} Q:{1} Proper:{2}", episodeId , quality, proper);
Logger.Debug("Episode not in History. ID:{0} Q:{1} Proper:{2}", episodeId, quality, proper);
return false;
}
}

View File

@@ -68,29 +68,37 @@ namespace NzbDrone.Core.Providers.Indexer
foreach (var item in feed)
{
ProcessItem(item);
try
{
ProcessItem(item);
}
catch (Exception itemEx)
{
_logger.ErrorException("An error occurred while processing feed item", itemEx);
}
}
}
catch (Exception e)
catch (Exception feedEx)
{
_logger.ErrorException("An error occurred while processing feed", e);
_logger.ErrorException("An error occurred while processing feed", feedEx);
}
}
_logger.Info("Finished processing feeds from " + Settings.Name);
}
private void ProcessItem(SyndicationItem feedItem)
internal void ProcessItem(SyndicationItem feedItem)
{
_logger.Info("Processing RSS feed item " + feedItem.Title.Text);
_logger.Debug("Processing RSS feed item " + feedItem.Title.Text);
var parseResult = ParseFeed(feedItem);
if (parseResult != null)
if (parseResult != null && parseResult.SeriesId != 0)
{
if (!_seriesProvider.IsMonitored(parseResult.SeriesId))
{
_logger.Debug("{0} is present in the DB but not tracked. skipping.", parseResult.SeriesTitle);
_logger.Debug("{0} is present in the DB but not tracked. skipping.", parseResult.CleanTitle);
return;
}
@@ -118,22 +126,22 @@ namespace NzbDrone.Core.Providers.Indexer
{
if (_historyProvider.Exists(episode.EpisodeId, parseResult.Quality, parseResult.Proper))
{
_logger.Debug("Episode in history: {0}", episode.ToString());
continue;
_logger.Debug("Episode in history: {0}", feedItem.Title.Text);
}
else
{
//TODO: Add episode to sab
//TODO: Add episode to sab
_historyProvider.Insert(new History
{
Date = DateTime.Now,
EpisodeId = episode.EpisodeId,
IsProper = parseResult.Proper,
NzbTitle = feedItem.Title.Text,
Quality = parseResult.Quality
});
_historyProvider.Add(new History
{
Date = DateTime.Now,
EpisodeId = episode.EpisodeId,
IsProper = parseResult.Proper,
NzbTitle = feedItem.Title.Text,
Quality = parseResult.Quality
});
}
}
}
}
@@ -147,16 +155,16 @@ namespace NzbDrone.Core.Providers.Indexer
var episodeParseResult = Parser.ParseEpisodeInfo(item.Title.Text);
if (episodeParseResult == null) return CustomParser(item, null);
var seriesInfo = _seriesProvider.FindSeries(episodeParseResult.SeriesTitle);
var seriesInfo = _seriesProvider.FindSeries(episodeParseResult.CleanTitle);
if (seriesInfo != null)
{
episodeParseResult.SeriesId = seriesInfo.SeriesId;
episodeParseResult.SeriesTitle = seriesInfo.Title;
episodeParseResult.CleanTitle = seriesInfo.Title;
return CustomParser(item, episodeParseResult);
}
_logger.Debug("Unable to map {0} to any of series in database", episodeParseResult.SeriesTitle);
_logger.Debug("Unable to map {0} to any of series in database", episodeParseResult.CleanTitle);
return CustomParser(item, episodeParseResult);
}
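The reshuffled loop above wraps each feed item in its own try/catch, so one malformed entry is logged and skipped instead of aborting the rest of the feed, while the outer catch still guards the feed download itself; the added SeriesId != 0 check likewise drops results that never mapped to a series. A stripped-down sketch of the per-item isolation; the types and method names are placeholders, not code from this repository.

using System;
using System.Collections.Generic;

public static class FeedSketch
{
    public static void Process(IEnumerable<string> items)
    {
        foreach (var item in items)
        {
            try
            {
                Handle(item);                                   // one bad item...
            }
            catch (Exception itemEx)
            {
                Console.WriteLine("item failed: " + itemEx);    // ...is logged and skipped,
            }                                                   // and the loop moves on
        }
    }

    private static void Handle(string item)
    {
        if (String.IsNullOrEmpty(item))
            throw new ArgumentException("malformed feed item");

        Console.WriteLine("processed " + item);
    }
}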

View File

@@ -112,7 +112,8 @@ namespace NzbDrone.Core.Providers
public virtual Series FindSeries(string title)
{
return _sonioRepo.Single<Series>(s => s.CleanTitle == Parser.NormalizeTitle(title));
var normalizeTitle = Parser.NormalizeTitle(title);
return _sonioRepo.Single<Series>(s => s.CleanTitle == normalizeTitle);
}
public virtual void UpdateSeries(Series series)
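FindSeries now normalizes the title into a local before building the predicate. That is a common pattern when handing lambdas to a LINQ-style data layer: a captured local is easily turned into a query parameter, while a method call buried inside the expression tree may not be translatable at all. The commit does not state the motive, so treat that as the likely reasoning rather than fact. A sketch of the before/after shape, reusing the repository types already in play here; the wrapper class and method name are illustrative only.

using NzbDrone.Core;
using NzbDrone.Core.Repository;
using SubSonic.Repository;

public static class SeriesLookupSketch
{
    public static Series FindByTitle(IRepository repo, string title)
    {
        // Before: the data layer saw Parser.NormalizeTitle() inside the expression tree.
        // return repo.Single<Series>(s => s.CleanTitle == Parser.NormalizeTitle(title));

        // After: normalize once, then compare against a simple captured value.
        var normalizeTitle = Parser.NormalizeTitle(title);
        return repo.Single<Series>(s => s.CleanTitle == normalizeTitle);
    }
}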

View File

@@ -38,6 +38,7 @@ namespace NzbDrone.Core.Repository
public virtual EpisodeFile EpisodeFile { get; set; }
[SubSonicToManyRelation]
public virtual List<History> Histories { get; private set; }
public virtual List<History> Histories { get; protected set; }
}
}

View File

@@ -1,6 +1,5 @@
@model List<NzbDrone.Web.Models.HistoryModel>
@using NzbDrone.Web.Models
@section Scripts{
<script type="text/javascript">
function onRowDataBound(e) {
@@ -21,11 +20,9 @@
}
</script>
}
@section TitleContent{
History
History
}
@section ActionMenu{
@{Html.Telerik().Menu().Name("historyMenu").Items(items =>
{
@@ -33,17 +30,17 @@
items.Add().Text("Purge History").Action("Purge", "History");
}).Render();}
}
@section MainContent{
@{Html.Telerik().Grid<HistoryModel>().Name("history")
.TableHtmlAttributes(new { @class = "Grid" })
.Columns(columns =>
{
columns.Bound(c => c.SeriesTitle).Title("Series Name").Width(120);
columns.Bound(c => c.SeasonNumber).Title("Season #").Width(10);
columns.Bound(c => c.EpisodeNumber).Title("Episode #").Width(10);
columns.Bound(c => c.EpisodeTitle).Title("Episode Title").Width(140);
columns.Bound(c => c.Quality).Title("Quality").Width(30);
columns.Bound(c => c.Date).Title("Date Grabbed").Width(60);
columns.Bound(c => c.SeasonNumber).Title("Season").Width(10);
columns.Bound(c => c.EpisodeNumber).Title("Episode").Width(10);
columns.Bound(c => c.EpisodeTitle).Title("Episode Title");
columns.Bound(c => c.Quality).Title("Quality").Width(10);
columns.Bound(c => c.Date).Title("Date/Time Grabbed");
})
.DetailView(detailView => detailView.ClientTemplate(
"<fieldset>" +
@@ -57,7 +54,7 @@
.Pageable(
c =>
c.PageSize(50).Position(GridPagerPosition.Bottom).Style(GridPagerStyles.NextPrevious))
//.Filterable()
//.ClientEvents(c => c.OnRowDataBound("onRowDataBound"))
//.Filterable()
//.ClientEvents(c => c.OnRowDataBound("onRowDataBound"))
.Render();}
}
}

View File

@@ -46,6 +46,10 @@ namespace NzbDrone
AppDomainException(e);
}
while (true)
{
Console.ReadLine();
}
}
private static void Attach()