Much better logging for searching

This commit is contained in:
Mark McDowall 2014-08-01 18:08:02 -07:00
parent c11c2c831d
commit 3b26e82644
2 changed files with 48 additions and 54 deletions

View File

@@ -6,7 +6,6 @@ using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Instrumentation.Extensions;
using NzbDrone.Core.Parser.Model;
using System.Linq;
@@ -46,75 +45,54 @@ namespace NzbDrone.Core.Indexers
public IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria)
{
_logger.Debug("Searching for {0}", searchCriteria);
var result = Fetch(indexer, searchCriteria, 0).DistinctBy(c => c.DownloadUrl).ToList();
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
return result;
return Fetch(indexer, searchCriteria, 0).DistinctBy(c => c.DownloadUrl).ToList();
}
private IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria, int offset)
{
_logger.Debug("Searching for {0} offset: {1}", searchCriteria, offset);
var searchUrls = indexer.GetSeasonSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, offset).ToList();
var result = Fetch(indexer, searchUrls);
_logger.Info("{0} offset {1}. Found {2}", indexer, offset, result.Count);
if (indexer.SupportsPaging && result.Count >= indexer.SupportedPageSize && offset < 900)
if (searchUrls.Any())
{
result.AddRange(Fetch(indexer, searchCriteria, offset + indexer.SupportedPageSize));
_logger.Debug("Searching for {0} offset: {1}", searchCriteria, offset);
var result = Fetch(indexer, searchUrls);
_logger.Info("{0} offset {1}. Found {2}", indexer, offset, result.Count);
if (indexer.SupportsPaging && result.Count >= indexer.SupportedPageSize && offset < 900)
{
result.AddRange(Fetch(indexer, searchCriteria, offset + indexer.SupportedPageSize));
}
//Only log finish for the first call to this recursive method
if (offset == 0)
{
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
}
return result;
}
return result;
return new List<ReleaseInfo>();
}
public IList<ReleaseInfo> Fetch(IIndexer indexer, SingleEpisodeSearchCriteria searchCriteria)
{
_logger.Debug("Searching for {0}", searchCriteria);
var searchUrls = indexer.GetEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, searchCriteria.EpisodeNumber).ToList();
var result = Fetch(indexer, searchUrls);
if (searchUrls.Any())
{
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
}
return result;
return Fetch(indexer, searchUrls, searchCriteria);
}
public IList<ReleaseInfo> Fetch(IIndexer indexer, DailyEpisodeSearchCriteria searchCriteria)
{
_logger.Debug("Searching for {0}", searchCriteria);
var searchUrls = indexer.GetDailyEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.AirDate).ToList();
var result = Fetch(indexer, searchUrls);
if (searchUrls.Any())
{
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
}
return result;
return Fetch(indexer, searchUrls, searchCriteria);
}
public IList<ReleaseInfo> Fetch(IIndexer indexer, AnimeEpisodeSearchCriteria searchCriteria)
{
_logger.Debug("Searching for {0}", searchCriteria);
var searchUrls = indexer.GetAnimeEpisodeSearchUrls(searchCriteria.SceneTitles, searchCriteria.Series.TvRageId, searchCriteria.AbsoluteEpisodeNumber).ToList();
var result = Fetch(indexer, searchUrls);
if (searchUrls.Any())
{
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
}
return result;
return Fetch(indexer, searchUrls, searchCriteria);
}
public IList<ReleaseInfo> Fetch(IIndexer indexer, SpecialEpisodeSearchCriteria searchCriteria)
@@ -123,17 +101,33 @@ namespace NzbDrone.Core.Indexers
foreach (var episodeQueryTitle in searchCriteria.EpisodeQueryTitles)
{
_logger.Debug("Performing query of {0} for {1}", indexer, episodeQueryTitle);
searchUrls.AddRange(indexer.GetSearchUrls(episodeQueryTitle));
var urls = indexer.GetSearchUrls(episodeQueryTitle).ToList();
if (urls.Any())
{
_logger.Debug("Performing query of {0} for {1}", indexer, episodeQueryTitle);
searchUrls.AddRange(urls);
}
}
var result = Fetch(indexer, searchUrls);
return Fetch(indexer, searchUrls, searchCriteria);
}
if (searchUrls.Any())
private List<ReleaseInfo> Fetch(IIndexer indexer, IEnumerable<string> urls, SearchCriteriaBase searchCriteria)
{
var urlList = urls.ToList();
if (urlList.Empty())
{
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
return new List<ReleaseInfo>();
}
_logger.Debug("Searching for {0}", searchCriteria);
var result = Fetch(indexer, urlList);
_logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
return result;
}

View File

@@ -74,13 +74,13 @@ namespace NzbDrone.Core.Jobs
var currentTasks = _scheduledTaskRepository.All().ToList();
_logger.Debug("Initializing jobs. Available: {0} Existing: {1}", defaultTasks.Count(), currentTasks.Count());
_logger.Trace("Initializing jobs. Available: {0} Existing: {1}", defaultTasks.Count(), currentTasks.Count());
foreach (var job in currentTasks)
{
if (!defaultTasks.Any(c => c.TypeName == job.TypeName))
{
_logger.Debug("Removing job from database '{0}'", job.TypeName);
_logger.Trace("Removing job from database '{0}'", job.TypeName);
_scheduledTaskRepository.Delete(job.Id);
}
}
@@ -109,7 +109,7 @@ namespace NzbDrone.Core.Jobs
if (scheduledTask != null)
{
_logger.Debug("Updating last run time for: {0}", scheduledTask.TypeName);
_logger.Trace("Updating last run time for: {0}", scheduledTask.TypeName);
_scheduledTaskRepository.SetLastExecutionTime(scheduledTask.Id, DateTime.UtcNow);
}
}