Revert "Refactored to use the RageID lookup disabled, removed Sonarr API setup"

This reverts commit b279c871b1.
Author: unknown
Date: 2015-07-19 10:59:07 -06:00
Parent: b279c871b1
Commit: b3ecee2cc9
30 changed files with 1259 additions and 924 deletions

View File

@@ -20,8 +20,6 @@ namespace Jackett
string DisplayDescription { get; }
Uri SiteLink { get; }
bool RequiresRageIDLookupDisabled { get; }
// Whether this indexer has been configured, verified and saved in the past and has the settings required for functioning
bool IsConfigured { get; }

View File

@@ -29,8 +29,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public event Action<IndexerInterface, string, Exception> OnResultParsingError;
@@ -163,63 +161,68 @@ namespace Jackett.Indexers
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
{
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
request.Method = HttpMethod.Get;
var response = await client.SendAsync(request);
results = await response.Content.ReadAsStringAsync();
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
{
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
request.Method = HttpMethod.Get;
var response = await client.SendAsync(request);
results = await response.Content.ReadAsStringAsync();
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
pubDate = UnixTimestampToDateTime(dateNum);
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
{
foreach (JObject t in r["torrents"])
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
pubDate = UnixTimestampToDateTime(dateNum);
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
{
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
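Note: the JSON parser above turns the groupTime field into a publish date via UnixTimestampToDateTime, a helper defined elsewhere in this file and not shown in the hunk. A plausible implementation, assuming the site reports seconds since the Unix epoch (the real helper may differ):

using System;

// Assumed shape of the UnixTimestampToDateTime helper used above.
static DateTime UnixTimestampToDateTime(double unixSeconds)
{
    // The Unix epoch is 1970-01-01T00:00:00Z; keep the result in UTC and let
    // callers decide whether to convert it to local time.
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    return epoch.AddSeconds(unixSeconds);
}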

View File

@@ -58,8 +58,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "https://animebytes.tv";
const string LoginUrl = BaseUrl + "/user/login";
const string SearchUrl = BaseUrl + "/torrents.php?filter_cat[1]=1";
@@ -198,12 +196,15 @@ namespace Jackett.Indexers
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
// The result list
var releases = new List<ReleaseInfo>();
var releases = new ConcurrentBag<ReleaseInfo>();
var titles = query.ShowTitles ?? new string[] { query.SearchTerm??string.Empty };
foreach (var result in await GetResults(query.SanitizedSearchTerm))
var tasks = titles.Select(async item =>
{
releases.Add(result);
}
foreach (var result in await GetResults(item))
releases.Add(result);
});
await Task.WhenAll(tasks);
return releases.ToArray();
}
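Note: the restored AnimeBytes query fans out one GetResults call per show title and gathers everything in a ConcurrentBag, which is safe to Add to from the concurrently running tasks where a plain List would not be. The same pattern reduced to a self-contained sketch (placeholder types; getResults stands in for the real lookup):

using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading.Tasks;

// Sketch of the per-title fan-out used above.
static async Task<string[]> QueryAllTitlesAsync(string[] titles, Func<string, Task<string[]>> getResults)
{
    var bag = new ConcurrentBag<string>();      // thread-safe collector
    var tasks = titles.Select(async title =>
    {
        foreach (var result in await getResults(title))
            bag.Add(result);                    // may run on several tasks at once
    });
    await Task.WhenAll(tasks);                  // wait for every title search
    return bag.ToArray();
}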
@@ -333,7 +334,7 @@ namespace Jackett.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 259200;
var downloadLink = links.Get(0);
// We dont know this so try to fake based on the release year
release.PublishDate = new DateTime(year, 1, 1);
release.PublishDate = release.PublishDate.AddDays(Math.Min(DateTime.Now.DayOfYear, 365) - 1);
@@ -341,7 +342,7 @@ namespace Jackett.Indexers
var infoLink = links.Get(1);
release.Comments = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href"));
release.Guid = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href") + "&nh=" + Hash(title)); // Sonarr should dedupe on this url - allow a url per name.
release.Link = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href"));
release.Link = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href"));
// We dont actually have a release name >.> so try to create one
var releaseTags = infoLink.InnerText.Split("|".ToCharArray(), StringSplitOptions.RemoveEmptyEntries).ToList();
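Note: AnimeBytes only exposes a release year, so the code fakes a publish date inside that year, and it appends "&nh=" + Hash(title) to the details URL so Sonarr sees a distinct Guid per release name. Hash is not shown in this hunk; a simple stand-in with the same purpose (assumed implementation, the real helper may use a different algorithm):

using System;
using System.Security.Cryptography;
using System.Text;

// Assumed shape of the Hash(title) helper used for the "&nh=" Guid suffix.
static string Hash(string input)
{
    using (var sha1 = SHA1.Create())
    {
        var bytes = sha1.ComputeHash(Encoding.UTF8.GetBytes(input ?? string.Empty));
        // Hex-encode so the value is safe to embed in a query string.
        return BitConverter.ToString(bytes).Replace("-", string.Empty);
    }
}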

View File

@@ -32,8 +32,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://beyondhd.me";
@@ -102,73 +100,75 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var rows = dom["table.torrenttable > tbody > tr.browse_color"];
foreach (var row in rows)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var qRow = row.Cq();
var qLink = row.ChildElements.ElementAt(2).FirstChild.Cq();
release.Link = new Uri(BaseUrl + "/" + qLink.Attr("href"));
var torrentID = qLink.Attr("href").Split('=').Last();
var descCol = row.ChildElements.ElementAt(3);
var qCommentLink = descCol.FirstChild.Cq();
release.Title = qCommentLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qCommentLink.Attr("href"));
release.Guid = release.Comments;
var dateStr = descCol.ChildElements.Last().Cq().Text().Split('|').Last().ToLowerInvariant().Replace("ago.", "").Trim();
var dateParts = dateStr.Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var timeSpan = TimeSpan.Zero;
for (var i = 0; i < dateParts.Length / 2; i++)
CQ dom = results;
var rows = dom["table.torrenttable > tbody > tr.browse_color"];
foreach (var row in rows)
{
var timeVal = ParseUtil.CoerceInt(dateParts[i * 2]);
var timeUnit = dateParts[i * 2 + 1];
if (timeUnit.Contains("year"))
timeSpan += TimeSpan.FromDays(365 * timeVal);
else if (timeUnit.Contains("month"))
timeSpan += TimeSpan.FromDays(30 * timeVal);
else if (timeUnit.Contains("day"))
timeSpan += TimeSpan.FromDays(timeVal);
else if (timeUnit.Contains("hour"))
timeSpan += TimeSpan.FromHours(timeVal);
else if (timeUnit.Contains("min"))
timeSpan += TimeSpan.FromMinutes(timeVal);
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var qRow = row.Cq();
var qLink = row.ChildElements.ElementAt(2).FirstChild.Cq();
release.Link = new Uri(BaseUrl + "/" + qLink.Attr("href"));
var torrentID = qLink.Attr("href").Split('=').Last();
var descCol = row.ChildElements.ElementAt(3);
var qCommentLink = descCol.FirstChild.Cq();
release.Title = qCommentLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qCommentLink.Attr("href"));
release.Guid = release.Comments;
var dateStr = descCol.ChildElements.Last().Cq().Text().Split('|').Last().ToLowerInvariant().Replace("ago.", "").Trim();
var dateParts = dateStr.Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var timeSpan = TimeSpan.Zero;
for (var i = 0; i < dateParts.Length / 2; i++)
{
var timeVal = ParseUtil.CoerceInt(dateParts[i * 2]);
var timeUnit = dateParts[i * 2 + 1];
if (timeUnit.Contains("year"))
timeSpan += TimeSpan.FromDays(365 * timeVal);
else if (timeUnit.Contains("month"))
timeSpan += TimeSpan.FromDays(30 * timeVal);
else if (timeUnit.Contains("day"))
timeSpan += TimeSpan.FromDays(timeVal);
else if (timeUnit.Contains("hour"))
timeSpan += TimeSpan.FromHours(timeVal);
else if (timeUnit.Contains("min"))
timeSpan += TimeSpan.FromMinutes(timeVal);
}
release.PublishDate = DateTime.SpecifyKind(DateTime.Now - timeSpan, DateTimeKind.Local);
var sizeEl = row.ChildElements.ElementAt(7);
var sizeVal = ParseUtil.CoerceFloat(sizeEl.ChildNodes.First().NodeValue);
var sizeUnit = sizeEl.ChildNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).Cq().Text()) + release.Seeders;
releases.Add(release);
}
release.PublishDate = DateTime.SpecifyKind(DateTime.Now - timeSpan, DateTimeKind.Local);
var sizeEl = row.ChildElements.ElementAt(7);
var sizeVal = ParseUtil.CoerceFloat(sizeEl.ChildNodes.First().NodeValue);
var sizeUnit = sizeEl.ChildNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).Cq().Text()) + release.Seeders;
releases.Add(release);
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
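Note: this tracker lists relative ages (for example "2 days 4 hours ago."), which the loop above folds into a TimeSpan two tokens at a time (value, unit). The same logic as a standalone helper, using the same month and year approximations as the hunk:

using System;

// Restates the relative-age parsing from the hunk above.
static TimeSpan ParseAge(string text)
{
    var parts = text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    var span = TimeSpan.Zero;
    for (var i = 0; i + 1 < parts.Length; i += 2)
    {
        var value = int.Parse(parts[i]);
        var unit = parts[i + 1];
        if (unit.Contains("year")) span += TimeSpan.FromDays(365 * value);
        else if (unit.Contains("month")) span += TimeSpan.FromDays(30 * value);
        else if (unit.Contains("day")) span += TimeSpan.FromDays(value);
        else if (unit.Contains("hour")) span += TimeSpan.FromHours(value);
        else if (unit.Contains("min")) span += TimeSpan.FromMinutes(value);
    }
    return span;
}
// Usage: release.PublishDate = DateTime.Now - ParseAge("2 days 4 hours");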

View File

@@ -31,8 +31,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
static string BaseUrl = "https://www.bit-hdtv.com";
static string LoginUrl = BaseUrl + "/takelogin.php";
static string SearchUrl = BaseUrl + "/torrents.php?cat=0&search=";
@@ -111,49 +109,52 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
dom["#needseed"].Remove();
var rows = dom["table[width='750'] > tbody"].Children();
foreach (var row in rows.Skip(1))
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ dom = results;
dom["#needseed"].Remove();
var rows = dom["table[width='750'] > tbody"].Children();
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Children().ElementAt(2).Cq().Children("a").First();
var qRow = row.Cq();
var qLink = qRow.Children().ElementAt(2).Cq().Children("a").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(string.Format(DownloadUrl, qLink.Attr("href").Split('=')[1]));
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(string.Format(DownloadUrl, qLink.Attr("href").Split('=')[1]));
var dateString = qRow.Children().ElementAt(5).Cq().Text().Trim();
var pubDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Local);
var dateString = qRow.Children().ElementAt(5).Cq().Text().Trim();
var pubDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Local);
var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
releases.Add(release);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
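Note: several of these parsers hand a value and a unit string to ReleaseInfo.GetBytes, which is defined outside this diff. Its likely behaviour, assuming conventional 1024-based multipliers (an assumption, not the actual Jackett source):

using System;

// Assumed behaviour of ReleaseInfo.GetBytes(unit, value); the real helper may differ.
static long GetBytes(string unit, float value)
{
    unit = (unit ?? string.Empty).Trim().ToUpperInvariant();
    if (unit.StartsWith("KB")) return (long)(value * 1024f);
    if (unit.StartsWith("MB")) return (long)(value * 1024f * 1024f);
    if (unit.StartsWith("GB")) return (long)(value * 1024f * 1024f * 1024f);
    if (unit.StartsWith("TB")) return (long)(value * 1024f * 1024f * 1024f * 1024f);
    return (long)value; // plain bytes or an unrecognised unit
}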

View File

@@ -71,8 +71,6 @@ namespace Jackett
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
public async Task<ConfigurationData> GetConfigurationForSetup()
@@ -132,58 +130,63 @@ namespace Jackett
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var table = dom["tbody > tr > .latest"].Parent().Parent();
foreach (var row in table.Children().Skip(1))
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var release = new ReleaseInfo();
CQ dom = results;
CQ qDetailsCol = row.ChildElements.ElementAt(1).Cq();
CQ qLink = qDetailsCol.Children("a").First();
var table = dom["tbody > tr > .latest"].Parent().Parent();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
release.Title = qLink.Attr("title");
release.Description = release.Title;
foreach (var row in table.Children().Skip(1))
{
var release = new ReleaseInfo();
//"Tuesday, June 11th 2013 at 03:52:53 AM" to...
//"Tuesday June 11 2013 03:52:53 AM"
var timestamp = qDetailsCol.Children("font").Text().Trim() + " ";
var timeParts = new List<string>(timestamp.Replace(" at", "").Replace(",", "").Split(' '));
timeParts[2] = Regex.Replace(timeParts[2], "[^0-9.]", "");
var formattedTimeString = string.Join(" ", timeParts.ToArray()).Trim();
var date = DateTime.ParseExact(formattedTimeString, "dddd MMMM d yyyy hh:mm:ss tt", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(date, DateTimeKind.Utc).ToLocalTime();
CQ qDetailsCol = row.ChildElements.ElementAt(1).Cq();
CQ qLink = qDetailsCol.Children("a").First();
release.Link = new Uri(BaseUrl + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
release.Title = qLink.Attr("title");
release.Description = release.Title;
var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
//"Tuesday, June 11th 2013 at 03:52:53 AM" to...
//"Tuesday June 11 2013 03:52:53 AM"
var timestamp = qDetailsCol.Children("font").Text().Trim() + " ";
var timeParts = new List<string>(timestamp.Replace(" at", "").Replace(",", "").Split(' '));
timeParts[2] = Regex.Replace(timeParts[2], "[^0-9.]", "");
var formattedTimeString = string.Join(" ", timeParts.ToArray()).Trim();
var date = DateTime.ParseExact(formattedTimeString, "dddd MMMM d yyyy hh:mm:ss tt", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(date, DateTimeKind.Utc).ToLocalTime();
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
release.Link = new Uri(BaseUrl + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));
//if (!release.Title.ToLower().Contains(title.ToLower()))
// continue;
var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
//if (!release.Title.ToLower().Contains(title.ToLower()))
// continue;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
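Note: this site prints timestamps like "Tuesday, June 11th 2013 at 03:52:53 AM", which the code massages into a form DateTime.ParseExact accepts. The same normalization gathered into one helper (a restatement of the lines above, no new behaviour):

using System;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;

// Restates the timestamp clean-up from the hunk above.
static DateTime ParseSiteTimestamp(string raw)
{
    // Drop " at" and commas: "Tuesday June 11th 2013 03:52:53 AM"
    var parts = raw.Replace(" at", "").Replace(",", "")
                   .Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries)
                   .ToList();
    // Strip the ordinal suffix from the day: "11th" -> "11".
    parts[2] = Regex.Replace(parts[2], "[^0-9.]", "");
    var cleaned = string.Join(" ", parts);
    var date = DateTime.ParseExact(cleaned, "dddd MMMM d yyyy hh:mm:ss tt",
                                   CultureInfo.InvariantCulture);
    // The original code treats the value as UTC and converts it to local time.
    return DateTime.SpecifyKind(date, DateTimeKind.Utc).ToLocalTime();
}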

View File

@@ -44,9 +44,8 @@ namespace Jackett.Indexers
public Uri SiteLink
{
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
}
public bool IsConfigured { get; private set; }
const string BaseUrl = "http://www.frenchtorrentdb.com/";
@@ -115,57 +114,59 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
CQ dom = results;
var rows = dom[".results_index ul"];
foreach (var row in rows)
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
try
{
var release = new ReleaseInfo();
CQ qRow = row.Cq();
CQ qLink = qRow.Find("li.torrents_name > .torrents_name_link").First();
CQ qDlLink = qRow.Find("li.torrents_download > a").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
release.Link = new Uri(BaseUrl + "/" + qDlLink.Attr("href").TrimStart('/'));
release.PublishDate = DateTime.Now;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("li.torrents_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find("li.torrents_leechers").Text()) + release.Seeders;
var sizeParts = qRow.Find("li.torrents_size").Text().Split(' ');
var sizeVal = ParseUtil.CoerceFloat(sizeParts[0]);
var sizeUnit = sizeParts[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
CQ dom = results;
var rows = dom[".results_index ul"];
foreach (var row in rows)
{
var release = new ReleaseInfo();
CQ qRow = row.Cq();
CQ qLink = qRow.Find("li.torrents_name > .torrents_name_link").First();
CQ qDlLink = qRow.Find("li.torrents_download > a").First();
releases.Add(release);
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
release.Link = new Uri(BaseUrl + "/" + qDlLink.Attr("href").TrimStart('/'));
release.PublishDate = DateTime.Now;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("li.torrents_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find("li.torrents_leechers").Text()) + release.Seeders;
var sizeParts = qRow.Find("li.torrents_size").Text().Split(' ');
var sizeVal = ParseUtil.CoerceFloat(sizeParts[0]);
var sizeUnit = sizeParts[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
public Task<byte[]> Download(Uri link)
{
return client.GetByteArrayAsync(link);
}
}
}
}

View File

@@ -37,8 +37,6 @@ namespace Jackett
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public Freshon()
@@ -120,65 +118,68 @@ namespace Jackett
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
string episodeSearchUrl;
if (string.IsNullOrEmpty(query.SanitizedSearchTerm))
episodeSearchUrl = SearchUrl;
else
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
}
string episodeSearchUrl;
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
CQ dom = results;
var rows = dom["#highlight > tbody > tr"];
foreach (var row in rows.Skip(1))
if (string.IsNullOrEmpty(title))
episodeSearchUrl = SearchUrl;
else
{
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Find("a.torrent_name_link").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + qRow.Find("td.table_links > a").First().Attr("href"));
DateTime pubDate;
var dateString = qRow.Find("td.table_added").Text().Trim();
if (dateString.StartsWith("Today "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1])).ToLocalTime();
else if (dateString.StartsWith("Yesterday "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1]) - TimeSpan.FromDays(1)).ToLocalTime();
else
pubDate = DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
release.PublishDate = pubDate;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;
var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
var searchString = title + " " + query.GetEpisodeSearchString();
episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
}
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
CQ dom = results;
var rows = dom["#highlight > tbody > tr"];
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Find("a.torrent_name_link").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + qRow.Find("td.table_links > a").First().Attr("href"));
DateTime pubDate;
var dateString = qRow.Find("td.table_added").Text().Trim();
if (dateString.StartsWith("Today "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1])).ToLocalTime();
else if (dateString.StartsWith("Yesterday "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1]) - TimeSpan.FromDays(1)).ToLocalTime();
else
pubDate = DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
release.PublishDate = pubDate;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;
var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
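Note: the added-date column mixes relative stamps ("Today hh:mm:ss", "Yesterday hh:mm:ss") with absolute "d-MMM-yyyy HH:mm:ss" dates, so the code branches on the prefix before parsing. The three branches pulled out unchanged for easier comparison:

using System;
using System.Globalization;

// Same branching as the hunk above, unchanged.
static DateTime ParseAddedDate(string dateString)
{
    if (dateString.StartsWith("Today "))
        return (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1])).ToLocalTime();
    if (dateString.StartsWith("Yesterday "))
        return (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1]) - TimeSpan.FromDays(1)).ToLocalTime();
    // Absolute form, treated as universal time and converted to local.
    return DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss",
                               CultureInfo.InvariantCulture,
                               DateTimeStyles.AssumeUniversal).ToLocalTime();
}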

View File

@@ -58,8 +58,6 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured
{
get;
@@ -133,10 +131,11 @@ namespace Jackett.Indexers
List<ReleaseInfo> releases = new List<ReleaseInfo>();
List<string> searchurls = new List<string>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
for (int page = 0; page < MAXPAGES; page++)
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
searchurls.Add(string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()), page));
var searchString = title + " " + query.GetEpisodeSearchString();
for (int page = 0; page < MAXPAGES; page++)
searchurls.Add(string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()), page));
}
foreach (string SearchUrl in searchurls)
@@ -177,7 +176,7 @@ namespace Jackett.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
int seeders, peers;
if (ParseUtil.TryCoerceInt(qRow.Find("td").Get(9).FirstChild.FirstChild.InnerText, out seeders))
@@ -215,7 +214,7 @@ namespace Jackett.Indexers
string[] dateSplit = qRow.Find("td.mainblockcontent").Get(5).InnerHTML.Split(',');
string dateString = dateSplit[1].Substring(0, dateSplit[1].IndexOf('>'));
release.PublishDate = DateTime.Parse(dateString, CultureInfo.InvariantCulture);
releases.Add(release);
}
}
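Note: the first hunk in this file shows that the revert builds one search URL per (title, page) pair up front and then iterates the list. The URL generation as a compact sketch (SearchUrl is assumed to take the encoded query as {0} and the page index as {1}, matching the string.Format call above; MAXPAGES is defined elsewhere in the file):

using System.Collections.Generic;
using System.Web;

// Sketch of the search-URL fan-out from the first hunk in this file.
static List<string> BuildSearchUrls(string searchUrlFormat, IEnumerable<string> titles,
                                    string episodeString, int maxPages)
{
    var urls = new List<string>();
    foreach (var title in titles)
    {
        var searchString = (title + " " + episodeString).Trim();
        for (int page = 0; page < maxPages; page++)
            urls.Add(string.Format(searchUrlFormat, HttpUtility.UrlEncode(searchString), page));
    }
    return urls;
}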

View File

@@ -24,8 +24,6 @@ namespace Jackett.Indexers
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
static string chromeUserAgent = BrowserUtil.ChromeUserAgent;
@@ -122,70 +120,75 @@ namespace Jackett.Indexers
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var rows = dom["table.torrents > tbody > tr"];
foreach (var row in rows.Skip(1))
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
var release = new ReleaseInfo();
CQ dom = results;
var qRow = row.Cq();
var rows = dom["table.torrents > tbody > tr"];
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var qTitleLink = qRow.Find("a.t_title").First();
release.Title = qTitleLink.Text().Trim();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qTitleLink.Attr("href"));
release.Comments = release.Guid;
var qRow = row.Cq();
DateTime pubDate;
var descString = qRow.Find(".t_ctime").Text();
var dateString = descString.Split('|').Last().Trim();
dateString = dateString.Split(new string[] { " by " }, StringSplitOptions.None)[0];
var dateValue = ParseUtil.CoerceFloat(dateString.Split(' ')[0]);
var dateUnit = dateString.Split(' ')[1];
if (dateUnit.Contains("minute"))
pubDate = DateTime.Now - TimeSpan.FromMinutes(dateValue);
else if (dateUnit.Contains("hour"))
pubDate = DateTime.Now - TimeSpan.FromHours(dateValue);
else if (dateUnit.Contains("day"))
pubDate = DateTime.Now - TimeSpan.FromDays(dateValue);
else if (dateUnit.Contains("week"))
pubDate = DateTime.Now - TimeSpan.FromDays(7 * dateValue);
else if (dateUnit.Contains("month"))
pubDate = DateTime.Now - TimeSpan.FromDays(30 * dateValue);
else if (dateUnit.Contains("year"))
pubDate = DateTime.Now - TimeSpan.FromDays(365 * dateValue);
else
pubDate = DateTime.MinValue;
release.PublishDate = pubDate;
var qTitleLink = qRow.Find("a.t_title").First();
release.Title = qTitleLink.Text().Trim();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qTitleLink.Attr("href"));
release.Comments = release.Guid;
var qLink = row.ChildElements.ElementAt(3).Cq().Children("a");
release.Link = new Uri(BaseUrl + qLink.Attr("href"));
DateTime pubDate;
var descString = qRow.Find(".t_ctime").Text();
var dateString = descString.Split('|').Last().Trim();
dateString = dateString.Split(new string[] { " by " }, StringSplitOptions.None)[0];
var dateValue = ParseUtil.CoerceFloat(dateString.Split(' ')[0]);
var dateUnit = dateString.Split(' ')[1];
if (dateUnit.Contains("minute"))
pubDate = DateTime.Now - TimeSpan.FromMinutes(dateValue);
else if (dateUnit.Contains("hour"))
pubDate = DateTime.Now - TimeSpan.FromHours(dateValue);
else if (dateUnit.Contains("day"))
pubDate = DateTime.Now - TimeSpan.FromDays(dateValue);
else if (dateUnit.Contains("week"))
pubDate = DateTime.Now - TimeSpan.FromDays(7 * dateValue);
else if (dateUnit.Contains("month"))
pubDate = DateTime.Now - TimeSpan.FromDays(30 * dateValue);
else if (dateUnit.Contains("year"))
pubDate = DateTime.Now - TimeSpan.FromDays(365 * dateValue);
else
pubDate = DateTime.MinValue;
release.PublishDate = pubDate;
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var qLink = row.ChildElements.ElementAt(3).Cq().Children("a");
release.Link = new Uri(BaseUrl + qLink.Attr("href"));
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
releases.Add(release);
}
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
return releases.ToArray();

View File

@@ -29,7 +29,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public event Action<IndexerInterface, string, Exception> OnResultParsingError;
@@ -146,63 +145,67 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
{
results = await client.GetStringAsync(episodeSearchUrl, retries);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
{
results = await client.GetStringAsync(episodeSearchUrl, retries);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
{
pubDate = UnixTimestampToDateTime(dateNum);
pubDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Utc).ToLocalTime();
}
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
{
foreach (JObject t in r["torrents"])
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
{
pubDate = UnixTimestampToDateTime(dateNum);
pubDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Utc).ToLocalTime();
}
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
{
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -31,8 +31,6 @@ namespace Jackett.Indexers
get { return new Uri("https://rarbg.com"); }
}
public bool RequiresRageIDLookupDisabled { get { return false; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "http://torrentapi.org";
@@ -129,7 +127,7 @@ namespace Jackett.Indexers
if (query.RageID != 0)
searchUrl = string.Format(baseUrl + SearchTVRageUrl, query.RageID, token);
else
searchUrl = string.Format(baseUrl + SearchQueryUrl, query.SanitizedSearchTerm, token);
searchUrl = string.Format(baseUrl + SearchQueryUrl, query.SearchTerm, token);
var request = CreateHttpRequest(searchUrl);
var response = await client.SendAsync(request);
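Note: the RARBG change only swaps the free-text branch back to query.SearchTerm; both branches interpolate the session token into an API URL. The branch in isolation (SearchTVRageUrl and SearchQueryUrl are format-string constants defined elsewhere in the file and are not reproduced here):

// Sketch of the URL selection above; the format strings are the indexer's own constants.
static string BuildSearchUrl(string baseUrl, string searchTvRageUrl, string searchQueryUrl,
                             int rageId, string searchTerm, string token)
{
    return rageId != 0
        ? string.Format(baseUrl + searchTvRageUrl, rageId, token)
        : string.Format(baseUrl + searchQueryUrl, searchTerm, token);
}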

View File

@@ -31,8 +31,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "https://sceneaccess.eu";
const string LoginUrl = BaseUrl + "/login";
const string SearchUrl = BaseUrl + "/{0}?method=1&c{1}=1&search={2}";
@@ -127,61 +125,64 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var searchSection = string.IsNullOrEmpty(query.Episode) ? "archive" : "browse";
var searchCategory = string.IsNullOrEmpty(query.Episode) ? "26" : "27";
var searchUrl = string.Format(SearchUrl, searchSection, searchCategory, searchString);
string results;
if (Program.IsWindows)
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
results = await client.GetStringAsync(searchUrl);
}
else
{
var response = await CurlHelper.GetAsync(searchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
var searchString = title + " " + query.GetEpisodeSearchString();
var searchSection = string.IsNullOrEmpty(query.Episode) ? "archive" : "browse";
var searchCategory = string.IsNullOrEmpty(query.Episode) ? "26" : "27";
try
{
CQ dom = results;
var rows = dom["#torrents-table > tbody > tr.tt_row"];
foreach (var row in rows)
var searchUrl = string.Format(SearchUrl, searchSection, searchCategory, searchString);
string results;
if (Program.IsWindows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 129600;
release.Title = qRow.Find(".ttr_name > a").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".ttr_name > a").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".td_dl > a").Attr("href"));
var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
if (DateTime.TryParseExact(timeStr, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out time))
{
release.PublishDate = time;
}
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".ttr_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".ttr_leechers").Text()) + release.Seeders;
releases.Add(release);
results = await client.GetStringAsync(searchUrl);
}
else
{
var response = await CurlHelper.GetAsync(searchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
CQ dom = results;
var rows = dom["#torrents-table > tbody > tr.tt_row"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 129600;
release.Title = qRow.Find(".ttr_name > a").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".ttr_name > a").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".td_dl > a").Attr("href"));
var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
if (DateTime.TryParseExact(timeStr, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out time))
{
release.PublishDate = time;
}
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".ttr_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".ttr_leechers").Text()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -33,8 +33,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://www.scenetime.com";
@@ -120,52 +118,55 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var searchContent = GetSearchFormData(searchString);
var response = await client.PostAsync(SearchUrl, searchContent);
var results = await response.Content.ReadAsStringAsync();
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var rows = dom["tr.browse"];
foreach (var row in rows)
var searchString = title + " " + query.GetEpisodeSearchString();
var searchContent = GetSearchFormData(searchString);
var response = await client.PostAsync(SearchUrl, searchContent);
var results = await response.Content.ReadAsStringAsync();
try
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
CQ dom = results;
var rows = dom["tr.browse"];
foreach (var row in rows)
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var descCol = row.ChildElements.ElementAt(1);
var qDescCol = descCol.Cq();
var qLink = qDescCol.Find("a");
release.Title = qLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
var torrentId = qLink.Attr("href").Split('=')[1];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
var descCol = row.ChildElements.ElementAt(1);
var qDescCol = descCol.Cq();
var qLink = qDescCol.Find("a");
release.Title = qLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
var torrentId = qLink.Attr("href").Split('=')[1];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
var dateStr = descCol.ChildNodes.Last().NodeValue.Trim();
var euDate = DateTime.ParseExact(dateStr, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var localDate = TimeZoneInfo.ConvertTimeToUtc(euDate, TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time")).ToLocalTime();
release.PublishDate = localDate;
var dateStr = descCol.ChildNodes.Last().NodeValue.Trim();
var euDate = DateTime.ParseExact(dateStr, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var localDate = TimeZoneInfo.ConvertTimeToUtc(euDate, TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time")).ToLocalTime();
release.PublishDate = localDate;
var sizeNodes = row.ChildElements.ElementAt(3).ChildNodes;
var sizeVal = sizeNodes.First().NodeValue;
var sizeUnit = sizeNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeNodes = row.ChildElements.ElementAt(3).ChildNodes;
var sizeVal = sizeNodes.First().NodeValue;
var sizeUnit = sizeNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(4).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(5).Cq().Text().Trim()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(4).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(5).Cq().Text().Trim()) + release.Seeders;
releases.Add(release);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
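Note: the listing's timestamp is interpreted as Central European time and converted through UTC to the local zone before being stored. The conversion on its own (same calls as above; the time-zone id is the Windows one and may not resolve on non-Windows systems):

using System;
using System.Globalization;

// Same CET-to-local conversion as the hunk above.
static DateTime CetStringToLocal(string dateStr)
{
    var euDate = DateTime.ParseExact(dateStr, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
    var cet = TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time");
    return TimeZoneInfo.ConvertTimeToUtc(euDate, cet).ToLocalTime();
}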

View File

@@ -33,8 +33,6 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string DefaultUrl = "http://showrss.info";
const string searchAllUrl = DefaultUrl + "/feeds/all.rss";
string BaseUrl;
@@ -119,54 +117,57 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(searchAllUrl);
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
using (wc)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(searchAllUrl);
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
using (wc)
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
}
ReleaseInfo release;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("title").InnerText;
var test = node.SelectSingleNode("enclosure");
release.Guid = new Uri(test.Attributes["url"].Value);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
release.Description = node.SelectSingleNode("description").InnerText;
release.InfoHash = node.SelectSingleNode("description").InnerText;
release.Size = 0;
release.Seeders = 1;
release.Peers = 1;
release.MagnetUri = new Uri(node.SelectSingleNode("link").InnerText);
releases.Add(release);
}
}
ReleaseInfo release;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
catch (Exception ex)
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("title").InnerText;
var test = node.SelectSingleNode("enclosure");
release.Guid = new Uri(test.Attributes["url"].Value);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
release.Description = node.SelectSingleNode("description").InnerText;
release.InfoHash = node.SelectSingleNode("description").InnerText;
release.Size = 0;
release.Seeders = 1;
release.Peers = 1;
release.MagnetUri = new Uri(node.SelectSingleNode("link").InnerText);
releases.Add(release);
OnResultParsingError(this, xml, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, xml, ex);
throw ex;
}
return releases.ToArray();
}
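Note: ShowRSS is consumed as a plain RSS feed; each <item> becomes a release, with the enclosure url used for the Guid and the <link> value doubling as the magnet URI. A minimal sketch of walking such a feed with the same WebClient/XmlDocument approach:

using System.Collections.Generic;
using System.Net;
using System.Xml;

// Minimal RSS walk in the style of the ShowRSS hunk above.
static List<string> ReadFeedTitles(string feedUrl)
{
    var doc = new XmlDocument();
    using (var wc = new WebClient())
        doc.LoadXml(wc.DownloadString(feedUrl));

    var titles = new List<string>();
    foreach (XmlNode item in doc.GetElementsByTagName("item"))
        titles.Add(item.SelectSingleNode("title").InnerText);
    return titles;
}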

View File

@@ -32,8 +32,6 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "https://getstrike.net";
@@ -100,47 +98,47 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchTerm = query.SanitizedSearchTerm ?? "2015";
var searchString = searchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { "2015" })
{
var jResults = JObject.Parse(results);
foreach (JObject result in (JArray)jResults["torrents"])
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var release = new ReleaseInfo();
var jResults = JObject.Parse(results);
foreach (JObject result in (JArray)jResults["torrents"])
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = (string)result["torrent_title"];
release.Description = release.Title;
release.Seeders = (int)result["seeds"];
release.Peers = (int)result["leeches"] + release.Seeders;
release.Size = (long)result["size"];
release.Title = (string)result["torrent_title"];
release.Description = release.Title;
release.Seeders = (int)result["seeds"];
release.Peers = (int)result["leeches"] + release.Seeders;
release.Size = (long)result["size"];
// "Apr 2, 2015", "Apr 12, 2015" (note the spacing)
var dateString = string.Join(" ", ((string)result["upload_date"]).Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
release.PublishDate = DateTime.ParseExact(dateString, "MMM d, yyyy", CultureInfo.InvariantCulture);
// "Apr 2, 2015", "Apr 12, 2015" (note the spacing)
var dateString = string.Join(" ", ((string)result["upload_date"]).Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
release.PublishDate = DateTime.ParseExact(dateString, "MMM d, yyyy", CultureInfo.InvariantCulture);
release.Guid = new Uri((string)result["page"]);
release.Comments = release.Guid;
release.Guid = new Uri((string)result["page"]);
release.Comments = release.Guid;
release.InfoHash = (string)result["torrent_hash"];
release.MagnetUri = new Uri((string)result["magnet_uri"]);
release.Link = new Uri(string.Format("{0}{1}", baseUrl, string.Format(DownloadUrl, release.InfoHash)));
release.InfoHash = (string)result["torrent_hash"];
release.MagnetUri = new Uri((string)result["magnet_uri"]);
release.Link = new Uri(string.Format("{0}{1}", baseUrl, string.Format(DownloadUrl, release.InfoHash)));
releases.Add(release);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
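Note: the comment in the hunk hints that single-digit days arrive with extra spacing ("Apr  2, 2015" vs "Apr 12, 2015"), so the code splits on spaces and re-joins before ParseExact. The normalization restated on its own:

using System;
using System.Globalization;

// Restates the upload_date handling from the hunk above.
static DateTime ParseUploadDate(string uploadDate)
{
    // Collapse any run of spaces so "Apr  2, 2015" becomes "Apr 2, 2015".
    var normalized = string.Join(" ",
        uploadDate.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
    return DateTime.ParseExact(normalized, "MMM d, yyyy", CultureInfo.InvariantCulture);
}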

View File

@@ -34,8 +34,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "http://www.t411.io";
@@ -138,50 +136,52 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchTerm = string.IsNullOrEmpty(query.SanitizedSearchTerm) ? "%20" : query.SanitizedSearchTerm;
var searchString = searchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
message.Headers.TryAddWithoutValidation("Authorization", await GetAuthToken());
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
var jsonResult = JObject.Parse(results);
try
foreach (var title in query.ShowTitles ?? new string[] { "%20" })
{
var items = (JArray)jsonResult["torrents"];
foreach (var item in items)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
message.Headers.TryAddWithoutValidation("Authorization", await GetAuthToken());
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
var jsonResult = JObject.Parse(results);
try
{
var release = new ReleaseInfo();
var items = (JArray)jsonResult["torrents"];
foreach (var item in items)
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var torrentId = (string)item["id"];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
release.Title = (string)item["name"];
release.Description = release.Title;
release.Comments = new Uri(string.Format(CommentsUrl, (string)item["rewritename"]));
release.Guid = release.Comments;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var torrentId = (string)item["id"];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
release.Title = (string)item["name"];
release.Description = release.Title;
release.Comments = new Uri(string.Format(CommentsUrl, (string)item["rewritename"]));
release.Guid = release.Comments;
var dateUtc = DateTime.ParseExact((string)item["added"], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(dateUtc, DateTimeKind.Utc).ToLocalTime();
var dateUtc = DateTime.ParseExact((string)item["added"], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(dateUtc, DateTimeKind.Utc).ToLocalTime();
release.Seeders = ParseUtil.CoerceInt((string)item["seeders"]);
release.Peers = ParseUtil.CoerceInt((string)item["leechers"]) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt((string)item["seeders"]);
release.Peers = ParseUtil.CoerceInt((string)item["leechers"]) + release.Seeders;
release.Size = ParseUtil.CoerceLong((string)item["size"]);
release.Size = ParseUtil.CoerceLong((string)item["size"]);
releases.Add(release);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
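Note: each T411 request carries an API token obtained from GetAuthToken() (defined elsewhere in the file) in the Authorization header, attached with TryAddWithoutValidation, presumably because the raw token is not a standard scheme/value pair. The authenticated GET as a small sketch:

using System;
using System.Net.Http;
using System.Threading.Tasks;

// Sketch of the authenticated request pattern from the T411 hunk above.
static async Task<string> GetWithTokenAsync(HttpClient client, string url, string token)
{
    var message = new HttpRequestMessage(HttpMethod.Get, new Uri(url));
    message.Headers.TryAddWithoutValidation("Authorization", token);
    var response = await client.SendAsync(message);
    return await response.Content.ReadAsStringAsync();
}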

View File

@@ -26,8 +26,6 @@ namespace Jackett.Indexers
public Uri SiteLink { get { return new Uri(DefaultUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "https://thepiratebay.mn";
@@ -95,90 +93,101 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var queryStr = HttpUtility.UrlEncode(searchString);
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, queryStr);
List<string> searchUrls = new List<string>();
string results;
if (Program.IsWindows)
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
results = await client.GetStringAsync(episodeSearchUrl);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, null, episodeSearchUrl);
results = Encoding.UTF8.GetString(response.Content);
var searchString = title + " " + query.GetEpisodeSearchString();
var queryStr = HttpUtility.UrlEncode(searchString);
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, queryStr);
searchUrls.Add(episodeSearchUrl);
}
try
foreach (var episodeSearchUrl in searchUrls)
{
CQ dom = results;
var rows = dom["#searchResult > tbody > tr"];
foreach (var row in rows)
string results;
if (Program.IsWindows)
{
var release = new ReleaseInfo();
results = await client.GetStringAsync(episodeSearchUrl);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, null, episodeSearchUrl);
results = Encoding.UTF8.GetString(response.Content);
}
CQ qRow = row.Cq();
CQ qLink = qRow.Find(".detName > .detLink").First();
try
{
CQ dom = results;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
var downloadCol = row.ChildElements.ElementAt(1).Cq().Children("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var descString = qRow.Find(".detDesc").Text().Trim();
var descParts = descString.Split(',');
var timeString = descParts[0].Split(' ')[1];
if (timeString.Contains("mins ago"))
var rows = dom["#searchResult > tbody > tr"];
foreach (var row in rows)
{
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(ParseUtil.CoerceInt(timeString.Split(' ')[0])));
}
else if (timeString.Contains("Today"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains("Y-day"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains(':'))
{
var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
else
{
var utc = DateTime.ParseExact(timeString, "MM-dd yyyy", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
var release = new ReleaseInfo();
var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
CQ qRow = row.Cq();
CQ qLink = qRow.Find(".detName > .detLink").First();
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
releases.Add(release);
var downloadCol = row.ChildElements.ElementAt(1).Cq().Children("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var descString = qRow.Find(".detDesc").Text().Trim();
var descParts = descString.Split(',');
var timeString = descParts[0].Split(' ')[1];
if (timeString.Contains("mins ago"))
{
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(ParseUtil.CoerceInt(timeString.Split(' ')[0])));
}
else if (timeString.Contains("Today"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains("Y-day"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains(':'))
{
var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
else
{
var utc = DateTime.ParseExact(timeString, "MM-dd yyyy", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
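
The InfoHash extraction in the Pirate Bay parser above relies on the usual magnet layout, magnet:?xt=urn:btih:&lt;hash&gt;&amp;dn=&lt;name&gt;: Split(':')[3] yields "&lt;hash&gt;&amp;dn=...", and trimming at '&amp;' leaves the bare hash. A small sketch of that split with a hypothetical link (the hash and name below are made up for illustration):

    static class MagnetHashSketch
    {
        // Same split logic as the parser above: take the fourth ':'-separated segment,
        // then cut it at the first '&' to drop the display-name parameter.
        public static string InfoHashFromMagnet(string magnetUri)
        {
            return magnetUri.Split(':')[3].Split('&')[0];
        }
    }

    // InfoHashFromMagnet("magnet:?xt=urn:btih:ABCDEF0123456789ABCDEF0123456789ABCDEF01&dn=Example.S01E01")
    //   returns "ABCDEF0123456789ABCDEF0123456789ABCDEF01".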

View File

@ -33,8 +33,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://torrentday.eu";
@ -128,60 +126,63 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var rows = dom["#torrentTable > tbody > tr.browse"];
foreach (var row in rows)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
CQ dom = results;
var rows = dom["#torrentTable > tbody > tr.browse"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrentName").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrentName").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".dlLinksInfo > a").Attr("href"));
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrentName").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrentName").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".dlLinksInfo > a").Attr("href"));
var sizeStr = qRow.Find(".sizeInfo").Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var sizeStr = qRow.Find(".sizeInfo").Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var dateStr = qRow.Find(".ulInfo").Text().Split('|').Last().Trim();
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
var dateStr = qRow.Find(".ulInfo").Text().Split('|').Last().Trim();
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seedersInfo").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".leechersInfo").Text()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seedersInfo").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".leechersInfo").Text()) + release.Seeders;
releases.Add(release);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
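
The upload age in the TorrentDay parser above is a phrase such as "3 hours" or "2 weeks", mapped onto an approximate TimeSpan (months and years are approximated as 30 and 365 days). A condensed sketch of that mapping, assuming the same "<number> <unit>" shape; the helper name is illustrative:

    using System;

    static class UploadAgeSketch
    {
        // Mirrors the unit table above; months and years are deliberately approximate.
        public static TimeSpan ApproximateAge(string dateStr)
        {
            var value = int.Parse(dateStr.Split(' ')[0]);
            if (dateStr.Contains("sec")) return TimeSpan.FromSeconds(value);
            if (dateStr.Contains("min")) return TimeSpan.FromMinutes(value);
            if (dateStr.Contains("hour")) return TimeSpan.FromHours(value);
            if (dateStr.Contains("day")) return TimeSpan.FromDays(value);
            if (dateStr.Contains("week")) return TimeSpan.FromDays(value * 7);
            if (dateStr.Contains("month")) return TimeSpan.FromDays(value * 30);
            if (dateStr.Contains("year")) return TimeSpan.FromDays(value * 365);
            return TimeSpan.Zero;
        }
    }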

View File

@ -33,8 +33,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "http://www.torrentleech.org";
const string LoginUrl = BaseUrl + "/user/account/login/";
const string SearchUrl = BaseUrl + "/torrents/browse/index/query/{0}/categories/2%2C26%2C27%2C32/orderby/added?";
@ -111,54 +109,60 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
CQ qRows = dom["#torrenttable > tbody > tr"];
foreach (var row in qRows)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var release = new ReleaseInfo();
CQ dom = results;
var qRow = row.Cq();
CQ qRows = dom["#torrenttable > tbody > tr"];
var debug = qRow.Html();
foreach (var row in qRows)
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var qRow = row.Cq();
CQ qLink = qRow.Find(".title > a").First();
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Title = qLink.Text();
release.Description = release.Title;
var debug = qRow.Html();
release.Link = new Uri(BaseUrl + qRow.Find(".quickdownload > a").Attr("href"));
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var dateString = qRow.Find(".name").First()[0].ChildNodes[4].NodeValue.Replace(" on", "").Trim();
//"2015-04-25 23:38:12"
//"yyyy-MMM-dd hh:mm:ss"
release.PublishDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
CQ qLink = qRow.Find(".title > a").First();
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Title = qLink.Text();
release.Description = release.Title;
var sizeStringParts = qRow.Children().ElementAt(4).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], ParseUtil.CoerceFloat(sizeStringParts[0]));
release.Link = new Uri(BaseUrl + qRow.Find(".quickdownload > a").Attr("href"));
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(qRow.Find(".leechers").Text());
var dateString = qRow.Find(".name").First()[0].ChildNodes[4].NodeValue.Replace(" on", "").Trim();
//"2015-04-25 23:38:12"
//"yyyy-MMM-dd hh:mm:ss"
release.PublishDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
releases.Add(release);
var sizeStringParts = qRow.Children().ElementAt(4).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], ParseUtil.CoerceFloat(sizeStringParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(qRow.Find(".leechers").Text());
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
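
Several parsers in this commit hand a "<value> <unit>" size string to ReleaseInfo.GetBytes. The helper itself is not shown in this diff, so the sketch below only illustrates the assumed behaviour of such a conversion (binary multiples; unknown units fall back to plain bytes):

    static class SizeSketch
    {
        // Assumed behaviour of a GetBytes-style helper: scale the numeric part by its reported unit.
        public static long GetBytes(string unit, float value)
        {
            switch (unit.Trim().ToUpperInvariant())
            {
                case "KB": return (long)(value * 1024d);
                case "MB": return (long)(value * 1024d * 1024);
                case "GB": return (long)(value * 1024d * 1024 * 1024);
                case "TB": return (long)(value * 1024d * 1024 * 1024 * 1024);
                default: return (long)value;
            }
        }
    }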

View File

@ -33,8 +33,6 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "http://torrentshack.me";
const string LoginUrl = BaseUrl + "/login.php";
const string SearchUrl = BaseUrl + "/torrents.php?searchstr={0}&release_type=both&searchtags=&tags_type=0&order_by=s3&order_way=desc&torrent_preset=all&filter_cat%5B600%5D=1&filter_cat%5B620%5D=1&filter_cat%5B700%5D=1&filter_cat%5B981%5D=1&filter_cat%5B980%5D=1";
@ -113,67 +111,69 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
CQ dom = results;
var rows = dom["#torrent_table > tbody > tr.torrent"];
foreach (var row in rows)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrent_name_link").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrent_name_link").Parent().Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".torrent_handle_links > a").First().Attr("href"));
var dateStr = qRow.Find(".time").Text().Trim();
if (dateStr.ToLower().Contains("just now"))
release.PublishDate = DateTime.Now;
else
CQ dom = results;
var rows = dom["#torrent_table > tbody > tr.torrent"];
foreach (var row in rows)
{
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("Just now"))
ts = TimeSpan.Zero;
else if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrent_name_link").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrent_name_link").Parent().Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".torrent_handle_links > a").First().Attr("href"));
var dateStr = qRow.Find(".time").Text().Trim();
if (dateStr.ToLower().Contains("just now"))
release.PublishDate = DateTime.Now;
else
{
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("Just now"))
ts = TimeSpan.Zero;
else if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
}
var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue.Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(6).InnerText.Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(7).InnerText.Trim()) + release.Seeders;
releases.Add(release);
}
var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue.Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(6).InnerText.Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(7).InnerText.Trim()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
return releases.ToArray();
}
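
Every indexer touched by this commit gains the same outer loop: run one search per entry in query.ShowTitles (the titles Sonarr resolved for a RageID) and fall back to a single pass with an empty title when nothing was resolved, which preserves the old single-query behaviour. A stripped-down sketch of just that loop (BuildSearchStrings is an illustrative name, not part of the codebase):

    using System.Collections.Generic;

    static class ShowTitleLoopSketch
    {
        // One search string per resolved title; an empty title keeps the old single-query path.
        public static List<string> BuildSearchStrings(string[] showTitles, string episodeSearchString)
        {
            var searchStrings = new List<string>();
            foreach (var title in showTitles ?? new[] { string.Empty })
                searchStrings.Add((title + " " + episodeSearchString).Trim());
            return searchStrings;
        }
    }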

View File

@ -32,8 +32,6 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string DefaultUrl = "https://torrentz.eu";
const string SearchUrl = DefaultUrl + "/feed_verifiedP?f={0}";
string BaseUrl;
@ -106,54 +104,57 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
{
using (wc)
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
using (wc)
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
}
ReleaseInfo release;
TorrentzHelper td;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("category").InnerText;
release.Guid = new Uri(node.SelectSingleNode("guid").InnerText);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
td = new TorrentzHelper(node.SelectSingleNode("description").InnerText);
release.Description = td.Description;
release.InfoHash = td.hash;
release.Size = td.Size;
release.Seeders = td.Seeders;
release.Peers = td.Peers + release.Seeders;
release.MagnetUri = TorrentzHelper.createMagnetLink(td.hash, serie_title);
releases.Add(release);
}
}
ReleaseInfo release;
TorrentzHelper td;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
catch (Exception ex)
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("category").InnerText;
release.Guid = new Uri(node.SelectSingleNode("guid").InnerText);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
td = new TorrentzHelper(node.SelectSingleNode("description").InnerText);
release.Description = td.Description;
release.InfoHash = td.hash;
release.Size = td.Size;
release.Seeders = td.Seeders;
release.Peers = td.Peers + release.Seeders;
release.MagnetUri = TorrentzHelper.createMagnetLink(td.hash, serie_title);
releases.Add(release);
OnResultParsingError(this, xml, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, xml, ex);
throw ex;
}
return releases.ToArray();
}
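
TorrentzHelper.createMagnetLink above is not shown in this diff; it is assumed to assemble a plain magnet URI from the info hash and the display name. A sketch of that construction under that assumption (no trackers are appended here, which the real helper may well do):

    using System;
    using System.Web;

    static class MagnetBuildSketch
    {
        // Builds "magnet:?xt=urn:btih:<hash>&dn=<url-encoded name>".
        public static Uri CreateMagnetLink(string infoHash, string displayName)
        {
            return new Uri(string.Format("magnet:?xt=urn:btih:{0}&dn={1}",
                infoHash, HttpUtility.UrlEncode(displayName)));
        }
    }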

View File

@ -133,7 +133,9 @@
<Compile Include="ResultPage.cs" />
<Compile Include="Server.cs" />
<Compile Include="ServerUtil.cs" />
<Compile Include="SonarApi.cs" />
<Compile Include="TorznabQuery.cs" />
<Compile Include="TVRage.cs" />
<Compile Include="WebApi.cs" />
<Compile Include="CurlHelper.cs" />
<Compile Include="Indexers\AlphaRatio.cs" />
@ -240,6 +242,7 @@
<Content Include="WebContent\jquery-2.1.3.min.js">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\logos\freshon.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
@ -324,4 +327,4 @@
</Properties>
</MonoDevelop>
</ProjectExtensions>
</Project>
</Project>

View File

@ -23,6 +23,7 @@ namespace Jackett
HttpListener listener;
IndexerManager indexerManager;
WebApi webApi;
SonarrApi sonarrApi;
public Server()
@ -33,8 +34,11 @@ namespace Jackett
ReadServerSettingsFile();
LoadApiKey();
indexerManager = new IndexerManager();
webApi = new WebApi(indexerManager);
sonarrApi = new SonarrApi();
webApi = new WebApi(indexerManager, sonarrApi);
}
void LoadApiKey()
@ -202,10 +206,10 @@ namespace Jackett
var torznabQuery = TorznabQuery.FromHttpQuery(query);
if (torznabQuery.RageIDLookupEnabled && indexer.RequiresRageIDLookupDisabled)
{
throw new ArgumentException("This indexer requires RageID lookup disabled");
}
if (torznabQuery.RageID != 0)
torznabQuery.ShowTitles = await sonarrApi.GetShowTitle(torznabQuery.RageID);
else if (!string.IsNullOrEmpty(torznabQuery.SearchTerm))
torznabQuery.ShowTitles = new string[] { torznabQuery.SearchTerm };
var releases = await indexer.PerformQuery(torznabQuery);

169 src/Jackett/SonarApi.cs Normal file
View File

@ -0,0 +1,169 @@
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
namespace Jackett
{
public class SonarrApi
{
public class ConfigurationSonarr : ConfigurationData
{
public StringItem Host { get; private set; }
public StringItem Port { get; private set; }
public StringItem ApiKey { get; private set; }
DisplayItem ApiInfo;
public ConfigurationSonarr()
{
Host = new StringItem { Name = "Host", Value = "http://localhost" };
Port = new StringItem { Name = "Port", Value = "8989" };
ApiKey = new StringItem { Name = "API Key" };
ApiInfo = new DisplayItem("API Key can be found in Sonarr > Settings > General > Security") { Name = "API Info" };
}
public override Item[] GetItems()
{
return new Item[] { Host, Port, ApiKey, ApiInfo };
}
}
static string SonarrConfigFile = Path.Combine(Program.AppConfigDirectory, "sonarr_api.json");
string Host;
int Port;
string ApiKey;
CookieContainer cookies;
HttpClientHandler handler;
HttpClient client;
ConcurrentDictionary<int, string[]> IdNameMappings;
public SonarrApi()
{
LoadSettings();
cookies = new CookieContainer();
handler = new HttpClientHandler
{
CookieContainer = cookies,
AllowAutoRedirect = true,
UseCookies = true,
};
client = new HttpClient(handler);
IdNameMappings = new ConcurrentDictionary<int, string[]>();
}
async Task ReloadNameMappings(string host, int port, string apiKey)
{
Uri hostUri = new Uri(host);
var queryUrl = string.Format("http://{0}:{1}/api/series?apikey={2}", hostUri.Host, port, apiKey);
var response = await client.GetStringAsync(queryUrl);
var json = JArray.Parse(response);
IdNameMappings.Clear();
foreach (var item in json)
{
var titles = new List<string>();
titles.Add(SanitizeTitle((string)item["title"]));
foreach (var t in item["alternateTitles"])
{
titles.Add(SanitizeTitle((string)t["title"]));
}
IdNameMappings.TryAdd((int)item["tvRageId"], titles.ToArray());
}
}
string SanitizeTitle(string title)
{
char[] arr = title.ToCharArray();
arr = Array.FindAll<char>(arr, c => (char.IsLetterOrDigit(c)
|| char.IsWhiteSpace(c)
|| c == '-'
|| c == '.'
));
title = new string(arr);
return title;
}
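// SanitizeTitle example (illustrative input, not taken from this diff):
//   "Marvel's Agents of S.H.I.E.L.D."  ->  "Marvels Agents of S.H.I.E.L.D."
// The apostrophe is dropped; letters, digits, whitespace, '-' and '.' pass through unchanged.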
void LoadSettings()
{
try
{
if (File.Exists(SonarrConfigFile))
{
var json = JObject.Parse(File.ReadAllText(SonarrConfigFile));
Host = (string)json["host"];
Port = (int)json["port"];
ApiKey = (string)json["api_key"];
}
}
catch (Exception) { }
}
void SaveSettings()
{
JObject json = new JObject();
json["host"] = Host;
json["port"] = Port;
json["api_key"] = ApiKey;
File.WriteAllText(SonarrConfigFile, json.ToString());
}
public ConfigurationSonarr GetConfiguration()
{
var config = new ConfigurationSonarr();
if (ApiKey != null)
{
config.Host.Value = Host;
config.Port.Value = Port.ToString();
config.ApiKey.Value = ApiKey;
}
return config;
}
public async Task ApplyConfiguration(JToken configJson)
{
var config = new ConfigurationSonarr();
config.LoadValuesFromJson(configJson);
await ReloadNameMappings(config.Host.Value, ParseUtil.CoerceInt(config.Port.Value), config.ApiKey.Value);
Host = "http://" + new Uri(config.Host.Value).Host;
Port = ParseUtil.CoerceInt(config.Port.Value);
ApiKey = config.ApiKey.Value;
SaveSettings();
}
public async Task TestConnection()
{
await ReloadNameMappings(Host, Port, ApiKey);
}
public async Task<string[]> GetShowTitle(int rid)
{
if (rid == 0)
return null;
int tries = 0;
while (tries < 2)
{
string[] titles;
if (IdNameMappings.TryGetValue(rid, out titles))
return titles;
await ReloadNameMappings(Host, Port, ApiKey);
tries++;
}
return null;
}
}
}
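
Tying this class back to the Server.cs change earlier in this diff: the server resolves a Torznab rid into the titles Sonarr knows and otherwise searches on the raw term. A minimal usage sketch under that reading (assumes the SonarrApi class above in the Jackett namespace; ResolveTitles is an illustrative wrapper, not part of the codebase):

    using System.Threading.Tasks;
    using Jackett;

    static class SonarrUsageSketch
    {
        // GetShowTitle returns null for rid 0 or an unknown series, so the caller
        // falls back to the plain search term, as Server.cs does above.
        public static async Task<string[]> ResolveTitles(SonarrApi sonarrApi, int rageId, string searchTerm)
        {
            var titles = await sonarrApi.GetShowTitle(rageId);
            return titles ?? new[] { searchTerm };
        }
    }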

12 src/Jackett/TVRage.cs Normal file
View File

@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Jackett
{
class TVRage
{
}
}

View File

@ -17,12 +17,10 @@ namespace Jackett
public int Limit { get; private set; }
public int Offset { get; private set; }
public int RageID { get; private set; }
public bool RageIDLookupEnabled { get; private set; }
public int Season { get; private set; }
public string Episode { get; private set; }
public string SearchTerm { get; private set; }
public string SanitizedSearchTerm { get; private set; }
public string[] ShowTitles { get; set; }
public string SearchTerm { get; set; }
public string GetEpisodeSearchString()
{
@ -41,39 +39,13 @@ namespace Jackett
return episodeString;
}
static string SanitizeSearchTerm(string title)
{
char[] arr = title.ToCharArray();
arr = Array.FindAll<char>(arr, c => (char.IsLetterOrDigit(c)
|| char.IsWhiteSpace(c)
|| c == '-'
|| c == '.'
));
title = new string(arr);
return title;
}
public static TorznabQuery FromHttpQuery(NameValueCollection query)
{
//{t=tvsearch&cat=5030%2c5040&extended=1&apikey=test&offset=0&limit=100&rid=24493&season=5&ep=1}
var q = new TorznabQuery();
q.QueryType = query["t"];
if (query["q"] == null)
{
q.SearchTerm = string.Empty;
q.SanitizedSearchTerm = string.Empty;
}
else
{
q.SearchTerm = query["q"];
q.SanitizedSearchTerm = SanitizeSearchTerm(q.SearchTerm);
}
q.RageIDLookupEnabled = query["rid_enabled"] != "0";
q.SearchTerm = query["q"];
if (query["cat"] != null)
{
q.Categories = query["cat"].Split(',');
@ -93,17 +65,11 @@ namespace Jackett
q.Offset = ParseUtil.CoerceInt(query["offset"]);
}
int rageId;
if (int.TryParse(query["rid"], out rageId))
{
q.RageID = rageId;
}
int season;
if (int.TryParse(query["season"], out season))
{
q.Season = season;
}
int temp;
if (int.TryParse(query["rid"], out temp))
q.RageID = temp;
if (int.TryParse(query["season"], out temp))
q.Season = temp;
q.Episode = query["ep"];
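
For reference, the sample query in the comment above (t=tvsearch&rid=24493&season=5&ep=1, plus categories) maps directly onto the fields this class exposes. A small sketch of feeding such a query string through FromHttpQuery (assumes the TorznabQuery class above; the wrapper name is illustrative):

    using System.Web;
    using Jackett;

    static class TorznabQuerySketch
    {
        // Parses the sample query into QueryType "tvsearch", Categories { "5030", "5040" },
        // RageID 24493, Season 5 and Episode "1".
        public static TorznabQuery ParseSample()
        {
            var query = HttpUtility.ParseQueryString(
                "t=tvsearch&cat=5030,5040&extended=1&apikey=test&offset=0&limit=100&rid=24493&season=5&ep=1");
            return TorznabQuery.FromHttpQuery(query);
        }
    }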

View File

@ -27,6 +27,7 @@ namespace Jackett
DeleteIndexer,
GetSonarrConfig,
ApplySonarrConfig,
TestSonarr,
GetJackettConfig,
ApplyJackettConfig,
JackettRestart,
@ -40,16 +41,19 @@ namespace Jackett
{ "delete_indexer", WebApiMethod.DeleteIndexer },
{ "get_sonarr_config", WebApiMethod.GetSonarrConfig },
{ "apply_sonarr_config", WebApiMethod.ApplySonarrConfig },
{ "test_sonarr", WebApiMethod.TestSonarr },
{ "get_jackett_config",WebApiMethod.GetJackettConfig},
{ "apply_jackett_config",WebApiMethod.ApplyJackettConfig},
{ "jackett_restart", WebApiMethod.JackettRestart },
};
IndexerManager indexerManager;
SonarrApi sonarrApi;
public WebApi(IndexerManager indexerManager)
public WebApi(IndexerManager indexerManager, SonarrApi sonarrApi)
{
this.indexerManager = indexerManager;
this.sonarrApi = sonarrApi;
}
public async Task<bool> HandleRequest(HttpListenerContext context)
@ -121,6 +125,15 @@ namespace Jackett
case WebApiMethod.DeleteIndexer:
handlerTask = HandleDeleteIndexer;
break;
case WebApiMethod.GetSonarrConfig:
handlerTask = HandleGetSonarrConfig;
break;
case WebApiMethod.ApplySonarrConfig:
handlerTask = HandleApplySonarrConfig;
break;
case WebApiMethod.TestSonarr:
handlerTask = HandleTestSonarr;
break;
case WebApiMethod.ApplyJackettConfig:
handlerTask = HandleApplyJackettConfig;
break;
@ -151,6 +164,55 @@ namespace Jackett
}
}
async Task<JToken> HandleTestSonarr(HttpListenerContext context)
{
JToken jsonReply = new JObject();
try
{
await sonarrApi.TestConnection();
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return jsonReply;
}
async Task<JToken> HandleApplySonarrConfig(HttpListenerContext context)
{
JToken jsonReply = new JObject();
try
{
var postData = await ReadPostDataJson(context.Request.InputStream);
await sonarrApi.ApplyConfiguration(postData);
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return jsonReply;
}
Task<JToken> HandleGetSonarrConfig(HttpListenerContext context)
{
JObject jsonReply = new JObject();
try
{
jsonReply["config"] = sonarrApi.GetConfiguration().ToJson();
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return Task.FromResult<JToken>(jsonReply);
}
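
The three handlers added above follow the same reply envelope as the existing ones: "result" is "success" or "error", with the exception message attached on failure. A condensed sketch of that convention (BuildReply and the action delegate are illustrative, not part of the codebase):

    using System;
    using System.Threading.Tasks;
    using Newtonsoft.Json.Linq;

    static class ReplyEnvelopeSketch
    {
        // Wraps an async action in the { "result", "error" } envelope used by the handlers above.
        public static async Task<JToken> BuildReply(Func<Task> action)
        {
            JToken jsonReply = new JObject();
            try
            {
                await action();
                jsonReply["result"] = "success";
            }
            catch (Exception ex)
            {
                jsonReply["result"] = "error";
                jsonReply["error"] = ex.Message;
            }
            return jsonReply;
        }
    }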
Task<JToken> HandleInvalidApiMethod(HttpListenerContext context)
{
JToken jsonReply = new JObject();
@ -214,7 +276,7 @@ namespace Jackett
jsonReply["api_key"] = ApiKey.CurrentKey;
jsonReply["app_version"] = Assembly.GetExecutingAssembly().GetName().Version.ToString();
JArray items = new JArray();
foreach (var i in indexerManager.Indexers.OrderBy(_ => _.Key))
foreach (var i in indexerManager.Indexers.OrderBy(_=>_.Key))
{
var indexer = i.Value;
var item = new JObject();

View File

@ -2,6 +2,7 @@
reloadIndexers();
loadJackettSettings();
loadSonarrInfo();
function loadJackettSettings() {
getJackettConfig(function (data) {
@ -43,6 +44,86 @@ function getJackettConfig(callback) {
});
}
function loadSonarrInfo() {
getSonarrConfig(function (data) {
$("#sonarr-host").val("");
var host, port, apiKey;
for (var i = 0; i < data.config.length; i++) {
if (data.config[i].id == "host")
host = data.config[i].value;
if (data.config[i].id == "port")
port = data.config[i].value;
if (data.config[i].id == "apikey")
apiKey = data.config[i].value;
}
if (!apiKey)
$("#sonarr-warning").show();
else {
$("#sonarr-warning").hide();
$("#sonarr-host").val(host + ":" + port);
}
});
}
function getSonarrConfig(callback) {
var jqxhr = $.get("get_sonarr_config", function (data) {
callback(data);
}).fail(function () {
doNotify("Error loading Sonarr API configuration, request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
});
}
$("#sonarr-test").click(function () {
var jqxhr = $.get("get_indexers", function (data) {
if (data.result == "error")
doNotify("Test failed for Sonarr API\n" + data.error, "danger", "glyphicon glyphicon-alert");
else
doNotify("Test successful for Sonarr API", "success", "glyphicon glyphicon-ok");
}).fail(function () {
doNotify("Error testing Sonarr, request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
});
});
$("#sonarr-settings").click(function () {
getSonarrConfig(function (data) {
var config = data.config;
var configForm = newConfigModal("Sonarr API", config);
var $goButton = configForm.find(".setup-indexer-go");
$goButton.click(function () {
var data = getConfigModalJson(configForm);
var originalBtnText = $goButton.html();
$goButton.prop('disabled', true);
$goButton.html($('#templates > .spinner')[0].outerHTML);
var jqxhr = $.post("apply_sonarr_config", JSON.stringify(data), function (data) {
if (data.result == "error") {
if (data.config) {
populateSetupForm(data.indexer, data.name, data.config);
}
doNotify("Configuration failed: " + data.error, "danger", "glyphicon glyphicon-alert");
}
else {
configForm.modal("hide");
loadSonarrInfo();
doNotify("Successfully configured Sonarr API", "success", "glyphicon glyphicon-ok");
}
}).fail(function () {
doNotify("Request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
}).always(function () {
$goButton.html(originalBtnText);
$goButton.prop('disabled', false);
});
});
configForm.modal("show");
});
});
function reloadIndexers() {
$('#indexers').hide();
$('#indexers > .indexer').remove();

View File

@ -25,18 +25,29 @@
<hr />
<div class="input-area">
<h4>Adding a Jackett indexer in Sonarr</h4>
<ol>
<li>In Sonarr go to <b>Settings > Indexers > Add > Torznab > Custom</b></li>
<li>For <b>URL</b> enter the <b>Torznab Host</b> of one of the indexers below</li>
<li>For <b>API key</b> enter the key below</li>
<li>Turn off <b>Enable RageID Lookup</b></li>
</ol>
<span class="input-header">Sonarr API Host: </span>
<input id="sonarr-host" class="form-control input-right" type="text" readonly />
<button id="sonarr-settings" class="btn btn-primary btn-sm">
Settings <span class="glyphicon glyphicon-wrench" aria-hidden="true"></span>
</button>
<button id="sonarr-test" class="btn btn-warning btn-sm">
Test <span class="glyphicon glyphicon-screenshot" aria-hidden="true"></span>
</button>
<p id="sonarr-warning" class="alert alert-danger" role="alert">
<span class="glyphicon glyphicon-exclamation-sign"></span>
Sonarr API must be configured
</p>
</div>
<hr />
<div class="input-area">
<p>
To add a Jackett indexer in Sonarr go to <b>Settings > Indexers > Add > Torznab > Custom</b>.
</p>
<span class="input-header">Jackett API Key: </span>
<input id="api-key-input" class="form-control input-right" type="text" value="" placeholder="API Key" readonly="">
</div>
<hr />
<div class="input-area">
<p>Use this key when adding indexers to Sonarr. This key works for all indexers.</p>
<span class="input-header">Jackett port: </span>
<input id="jackett-port" class="form-control input-right" type="text" value="" placeholder="9117">
<button id="change-jackett-port" class="btn btn-primary btn-sm">