Refactored indexers to expose a RequiresRageIDLookupDisabled flag; removed the Sonarr API setup

unknown 2015-07-18 14:35:02 -06:00
parent 7cfa590b94
commit b279c871b1
30 changed files with 922 additions and 1257 deletions
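
The change repeated across the files below has two parts: every indexer now exposes a RequiresRageIDLookupDisabled flag, and each PerformQuery builds its search string from query.SanitizedSearchTerm plus query.GetEpisodeSearchString() instead of looping over the Sonarr-supplied query.ShowTitles. A minimal sketch of the new flag, inferred only from what the diff shows; IIndexerSketch and ExampleIndexerSketch are illustrative stand-ins for Jackett's IndexerInterface and its implementations:

// Sketch of the interface member this commit adds (illustrative stub types).
public interface IIndexerSketch
{
    // True when the indexer only supports plain-text searches, so the
    // TVRage ID lookup has to be disabled on the Sonarr side for it to work.
    bool RequiresRageIDLookupDisabled { get; }
}

public class ExampleIndexerSketch : IIndexerSketch
{
    // Almost every tracker in this commit returns true; RARBG returns false,
    // consistent with it being able to search by TVRage ID directly.
    public bool RequiresRageIDLookupDisabled { get { return true; } }
}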

View File

@@ -20,6 +20,8 @@ namespace Jackett
string DisplayDescription { get; }
Uri SiteLink { get; }
bool RequiresRageIDLookupDisabled { get; }
// Whether this indexer has been configured, verified and saved in the past and has the settings required for functioning
bool IsConfigured { get; }

View File

@@ -29,6 +29,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public event Action<IndexerInterface, string, Exception> OnResultParsingError;
@@ -161,68 +163,63 @@ namespace Jackett.Indexers
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
string results;
if (Program.IsWindows)
{
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
request.Method = HttpMethod.Get;
var response = await client.SendAsync(request);
results = await response.Content.ReadAsStringAsync();
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
{
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
pubDate = UnixTimestampToDateTime(dateNum);
string results;
if (Program.IsWindows)
{
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
request.Method = HttpMethod.Get;
var response = await client.SendAsync(request);
results = await response.Content.ReadAsStringAsync();
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
var groupName = (string)r["groupName"];
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
if (r["torrents"] is JArray)
{
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
pubDate = UnixTimestampToDateTime(dateNum);
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
{
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
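
This hunk, like most of the ones that follow, fetches the search page through one of two paths: HttpClient on Windows, and Jackett's CurlHelper with the stored cookie header everywhere else. A sketch of that split pulled out into a helper; client, cookieHeader, CreateHttpRequest and CurlHelper are the members already visible in the diff, while the FetchSearchPageAsync wrapper itself is only illustrative:

// Platform-dependent fetch, mirroring the if (Program.IsWindows) branches above.
private async Task<string> FetchSearchPageAsync(string url)
{
    if (Program.IsWindows)
    {
        // Windows: use the managed HttpClient with the indexer's request setup.
        var request = CreateHttpRequest(new Uri(url));
        request.Method = HttpMethod.Get;
        var response = await client.SendAsync(request);
        return await response.Content.ReadAsStringAsync();
    }
    else
    {
        // Non-Windows (Mono): fall back to curl, passing the cookie header explicitly.
        var curlResponse = await CurlHelper.GetAsync(url, cookieHeader);
        return Encoding.UTF8.GetString(curlResponse.Content);
    }
}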

View File

@@ -58,6 +58,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "https://animebytes.tv";
const string LoginUrl = BaseUrl + "/user/login";
const string SearchUrl = BaseUrl + "/torrents.php?filter_cat[1]=1";
@@ -196,15 +198,12 @@ namespace Jackett.Indexers
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
// The result list
var releases = new ConcurrentBag<ReleaseInfo>();
var titles = query.ShowTitles ?? new string[] { query.SearchTerm??string.Empty };
var releases = new List<ReleaseInfo>();
var tasks = titles.Select(async item =>
foreach (var result in await GetResults(query.SanitizedSearchTerm))
{
foreach (var result in await GetResults(item))
releases.Add(result);
});
await Task.WhenAll(tasks);
releases.Add(result);
}
return releases.ToArray();
}
@@ -334,7 +333,7 @@ namespace Jackett.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 259200;
var downloadLink = links.Get(0);
// We dont know this so try to fake based on the release year
release.PublishDate = new DateTime(year, 1, 1);
release.PublishDate = release.PublishDate.AddDays(Math.Min(DateTime.Now.DayOfYear, 365) - 1);
@@ -342,7 +341,7 @@ namespace Jackett.Indexers
var infoLink = links.Get(1);
release.Comments = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href"));
release.Guid = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href") + "&nh=" + Hash(title)); // Sonarr should dedupe on this url - allow a url per name.
release.Link = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href"));
release.Link = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href"));
// We dont actually have a release name >.> so try to create one
var releaseTags = infoLink.InnerText.Split("|".ToCharArray(), StringSplitOptions.RemoveEmptyEntries).ToList();

View File

@@ -32,6 +32,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://beyondhd.me";
@@ -100,75 +102,73 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
CQ dom = results;
var rows = dom["table.torrenttable > tbody > tr.browse_color"];
foreach (var row in rows)
{
CQ dom = results;
var rows = dom["table.torrenttable > tbody > tr.browse_color"];
foreach (var row in rows)
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var qRow = row.Cq();
var qLink = row.ChildElements.ElementAt(2).FirstChild.Cq();
release.Link = new Uri(BaseUrl + "/" + qLink.Attr("href"));
var torrentID = qLink.Attr("href").Split('=').Last();
var descCol = row.ChildElements.ElementAt(3);
var qCommentLink = descCol.FirstChild.Cq();
release.Title = qCommentLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qCommentLink.Attr("href"));
release.Guid = release.Comments;
var dateStr = descCol.ChildElements.Last().Cq().Text().Split('|').Last().ToLowerInvariant().Replace("ago.", "").Trim();
var dateParts = dateStr.Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var timeSpan = TimeSpan.Zero;
for (var i = 0; i < dateParts.Length / 2; i++)
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var qRow = row.Cq();
var qLink = row.ChildElements.ElementAt(2).FirstChild.Cq();
release.Link = new Uri(BaseUrl + "/" + qLink.Attr("href"));
var torrentID = qLink.Attr("href").Split('=').Last();
var descCol = row.ChildElements.ElementAt(3);
var qCommentLink = descCol.FirstChild.Cq();
release.Title = qCommentLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qCommentLink.Attr("href"));
release.Guid = release.Comments;
var dateStr = descCol.ChildElements.Last().Cq().Text().Split('|').Last().ToLowerInvariant().Replace("ago.", "").Trim();
var dateParts = dateStr.Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var timeSpan = TimeSpan.Zero;
for (var i = 0; i < dateParts.Length / 2; i++)
{
var timeVal = ParseUtil.CoerceInt(dateParts[i * 2]);
var timeUnit = dateParts[i * 2 + 1];
if (timeUnit.Contains("year"))
timeSpan += TimeSpan.FromDays(365 * timeVal);
else if (timeUnit.Contains("month"))
timeSpan += TimeSpan.FromDays(30 * timeVal);
else if (timeUnit.Contains("day"))
timeSpan += TimeSpan.FromDays(timeVal);
else if (timeUnit.Contains("hour"))
timeSpan += TimeSpan.FromHours(timeVal);
else if (timeUnit.Contains("min"))
timeSpan += TimeSpan.FromMinutes(timeVal);
}
release.PublishDate = DateTime.SpecifyKind(DateTime.Now - timeSpan, DateTimeKind.Local);
var sizeEl = row.ChildElements.ElementAt(7);
var sizeVal = ParseUtil.CoerceFloat(sizeEl.ChildNodes.First().NodeValue);
var sizeUnit = sizeEl.ChildNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).Cq().Text()) + release.Seeders;
releases.Add(release);
var timeVal = ParseUtil.CoerceInt(dateParts[i * 2]);
var timeUnit = dateParts[i * 2 + 1];
if (timeUnit.Contains("year"))
timeSpan += TimeSpan.FromDays(365 * timeVal);
else if (timeUnit.Contains("month"))
timeSpan += TimeSpan.FromDays(30 * timeVal);
else if (timeUnit.Contains("day"))
timeSpan += TimeSpan.FromDays(timeVal);
else if (timeUnit.Contains("hour"))
timeSpan += TimeSpan.FromHours(timeVal);
else if (timeUnit.Contains("min"))
timeSpan += TimeSpan.FromMinutes(timeVal);
}
}
release.PublishDate = DateTime.SpecifyKind(DateTime.Now - timeSpan, DateTimeKind.Local);
var sizeEl = row.ChildElements.ElementAt(7);
var sizeVal = ParseUtil.CoerceFloat(sizeEl.ChildNodes.First().NodeValue);
var sizeUnit = sizeEl.ChildNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(10).Cq().Text()) + release.Seeders;
releases.Add(release);
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
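
BeyondHD lists an age such as "1 month 2 days ago" instead of a timestamp, so the parser above walks value/unit pairs and accumulates an approximate TimeSpan (months as 30 days, years as 365). The same idea as a standalone sketch; the class and method names are illustrative:

using System;

static class RelativeAgeSketch
{
    // Turns e.g. "1 month 2 days" into an approximate TimeSpan,
    // mirroring the unit loop in the BeyondHD hunk above.
    public static TimeSpan Parse(string text)
    {
        var parts = text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
        var span = TimeSpan.Zero;
        for (var i = 0; i + 1 < parts.Length; i += 2)
        {
            var value = int.Parse(parts[i]);
            var unit = parts[i + 1];
            if (unit.Contains("year")) span += TimeSpan.FromDays(365 * value);
            else if (unit.Contains("month")) span += TimeSpan.FromDays(30 * value);
            else if (unit.Contains("day")) span += TimeSpan.FromDays(value);
            else if (unit.Contains("hour")) span += TimeSpan.FromHours(value);
            else if (unit.Contains("min")) span += TimeSpan.FromMinutes(value);
        }
        return span;
    }
}

DateTime.Now - RelativeAgeSketch.Parse("2 days 4 hours") then gives roughly the value the code above stores in release.PublishDate.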

View File

@@ -31,6 +31,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
static string BaseUrl = "https://www.bit-hdtv.com";
static string LoginUrl = BaseUrl + "/takelogin.php";
static string SearchUrl = BaseUrl + "/torrents.php?cat=0&search=";
@@ -109,53 +111,50 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var results = await client.GetStringAsync(episodeSearchUrl);
try
CQ dom = results;
dom["#needseed"].Remove();
var rows = dom["table[width='750'] > tbody"].Children();
foreach (var row in rows.Skip(1))
{
CQ dom = results;
dom["#needseed"].Remove();
var rows = dom["table[width='750'] > tbody"].Children();
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Children().ElementAt(2).Cq().Children("a").First();
var qRow = row.Cq();
var qLink = qRow.Children().ElementAt(2).Cq().Children("a").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(string.Format(DownloadUrl, qLink.Attr("href").Split('=')[1]));
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(string.Format(DownloadUrl, qLink.Attr("href").Split('=')[1]));
var dateString = qRow.Children().ElementAt(5).Cq().Text().Trim();
var pubDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Local);
var dateString = qRow.Children().ElementAt(5).Cq().Text().Trim();
var pubDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Local);
var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeCol = qRow.Children().ElementAt(6);
var sizeVal = sizeCol.ChildNodes[0].NodeValue;
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(8).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(9).Cq().Text().Trim()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}

View File

@@ -71,6 +71,8 @@ namespace Jackett
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
public async Task<ConfigurationData> GetConfigurationForSetup()
@@ -130,64 +132,59 @@ namespace Jackett
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ dom = results;
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
var table = dom["tbody > tr > .latest"].Parent().Parent();
foreach (var row in table.Children().Skip(1))
{
CQ dom = results;
var release = new ReleaseInfo();
var table = dom["tbody > tr > .latest"].Parent().Parent();
CQ qDetailsCol = row.ChildElements.ElementAt(1).Cq();
CQ qLink = qDetailsCol.Children("a").First();
foreach (var row in table.Children().Skip(1))
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
release.Title = qLink.Attr("title");
release.Description = release.Title;
CQ qDetailsCol = row.ChildElements.ElementAt(1).Cq();
CQ qLink = qDetailsCol.Children("a").First();
//"Tuesday, June 11th 2013 at 03:52:53 AM" to...
//"Tuesday June 11 2013 03:52:53 AM"
var timestamp = qDetailsCol.Children("font").Text().Trim() + " ";
var timeParts = new List<string>(timestamp.Replace(" at", "").Replace(",", "").Split(' '));
timeParts[2] = Regex.Replace(timeParts[2], "[^0-9.]", "");
var formattedTimeString = string.Join(" ", timeParts.ToArray()).Trim();
var date = DateTime.ParseExact(formattedTimeString, "dddd MMMM d yyyy hh:mm:ss tt", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(date, DateTimeKind.Utc).ToLocalTime();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Link = new Uri(BaseUrl + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));
//"Tuesday, June 11th 2013 at 03:52:53 AM" to...
//"Tuesday June 11 2013 03:52:53 AM"
var timestamp = qDetailsCol.Children("font").Text().Trim() + " ";
var timeParts = new List<string>(timestamp.Replace(" at", "").Replace(",", "").Split(' '));
timeParts[2] = Regex.Replace(timeParts[2], "[^0-9.]", "");
var formattedTimeString = string.Join(" ", timeParts.ToArray()).Trim();
var date = DateTime.ParseExact(formattedTimeString, "dddd MMMM d yyyy hh:mm:ss tt", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(date, DateTimeKind.Utc).ToLocalTime();
var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Link = new Uri(BaseUrl + "/" + row.ChildElements.ElementAt(2).Cq().Children("a.index").Attr("href"));
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
var sizeCol = row.ChildElements.ElementAt(6);
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue);
var sizeUnit = sizeCol.ChildNodes[2].NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
//if (!release.Title.ToLower().Contains(title.ToLower()))
// continue;
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(8).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(9).Cq().Text()) + release.Seeders;
//if (!release.Title.ToLower().Contains(title.ToLower()))
// continue;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -44,9 +44,10 @@ namespace Jackett.Indexers
public Uri SiteLink
{
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "http://www.frenchtorrentdb.com/";
const string MainUrl = BaseUrl + "?section=INDEX";
@@ -114,59 +115,57 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
try
CQ dom = results;
var rows = dom[".results_index ul"];
foreach (var row in rows)
{
var release = new ReleaseInfo();
CQ qRow = row.Cq();
CQ qLink = qRow.Find("li.torrents_name > .torrents_name_link").First();
CQ qDlLink = qRow.Find("li.torrents_download > a").First();
CQ dom = results;
var rows = dom[".results_index ul"];
foreach (var row in rows)
{
var release = new ReleaseInfo();
CQ qRow = row.Cq();
CQ qLink = qRow.Find("li.torrents_name > .torrents_name_link").First();
CQ qDlLink = qRow.Find("li.torrents_download > a").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
release.Link = new Uri(BaseUrl + "/" + qDlLink.Attr("href").TrimStart('/'));
release.PublishDate = DateTime.Now;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("li.torrents_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find("li.torrents_leechers").Text()) + release.Seeders;
var sizeParts = qRow.Find("li.torrents_size").Text().Split(' ');
var sizeVal = ParseUtil.CoerceFloat(sizeParts[0]);
var sizeUnit = sizeParts[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
release.Link = new Uri(BaseUrl + "/" + qDlLink.Attr("href").TrimStart('/'));
release.PublishDate = DateTime.Now;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("li.torrents_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find("li.torrents_leechers").Text()) + release.Seeders;
var sizeParts = qRow.Find("li.torrents_size").Text().Split(' ');
var sizeVal = ParseUtil.CoerceFloat(sizeParts[0]);
var sizeUnit = sizeParts[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
public Task<byte[]> Download(Uri link)
{
return client.GetByteArrayAsync(link);
}
}
}
}

View File

@@ -37,6 +37,8 @@ namespace Jackett
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public Freshon()
@@ -118,69 +120,66 @@ namespace Jackett
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
string episodeSearchUrl;
if (string.IsNullOrEmpty(query.SanitizedSearchTerm))
episodeSearchUrl = SearchUrl;
else
{
string episodeSearchUrl;
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
}
if (string.IsNullOrEmpty(title))
episodeSearchUrl = SearchUrl;
else
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
CQ dom = results;
var rows = dom["#highlight > tbody > tr"];
foreach (var row in rows.Skip(1))
{
var searchString = title + " " + query.GetEpisodeSearchString();
episodeSearchUrl = string.Format("{0}?search={1}&cat=0", SearchUrl, HttpUtility.UrlEncode(searchString));
}
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
CQ dom = results;
var rows = dom["#highlight > tbody > tr"];
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Find("a.torrent_name_link").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + qRow.Find("td.table_links > a").First().Attr("href"));
DateTime pubDate;
var dateString = qRow.Find("td.table_added").Text().Trim();
if (dateString.StartsWith("Today "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1])).ToLocalTime();
else if (dateString.StartsWith("Yesterday "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1]) - TimeSpan.FromDays(1)).ToLocalTime();
else
pubDate = DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
release.PublishDate = pubDate;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;
var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
var release = new ReleaseInfo();
var qRow = row.Cq();
var qLink = qRow.Find("a.torrent_name_link").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Attr("title");
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + qRow.Find("td.table_links > a").First().Attr("href"));
DateTime pubDate;
var dateString = qRow.Find("td.table_added").Text().Trim();
if (dateString.StartsWith("Today "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1])).ToLocalTime();
else if (dateString.StartsWith("Yesterday "))
pubDate = (DateTime.UtcNow + TimeSpan.Parse(dateString.Split(' ')[1]) - TimeSpan.FromDays(1)).ToLocalTime();
else
pubDate = DateTime.ParseExact(dateString, "d-MMM-yyyy HH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
release.PublishDate = pubDate;
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td.table_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find("td.table_leechers").Text().Trim()) + release.Seeders;
var sizeCol = qRow.Find("td.table_size")[0];
var sizeVal = ParseUtil.CoerceFloat(sizeCol.ChildNodes[0].NodeValue.Trim());
var sizeUnit = sizeCol.ChildNodes[2].NodeValue.Trim();
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}

View File

@@ -58,6 +58,8 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured
{
get;
@@ -131,11 +133,10 @@ namespace Jackett.Indexers
List<ReleaseInfo> releases = new List<ReleaseInfo>();
List<string> searchurls = new List<string>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
for (int page = 0; page < MAXPAGES; page++)
{
var searchString = title + " " + query.GetEpisodeSearchString();
for (int page = 0; page < MAXPAGES; page++)
searchurls.Add(string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()), page));
searchurls.Add(string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()), page));
}
foreach (string SearchUrl in searchurls)
@@ -176,7 +177,7 @@ namespace Jackett.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
int seeders, peers;
if (ParseUtil.TryCoerceInt(qRow.Find("td").Get(9).FirstChild.FirstChild.InnerText, out seeders))
@@ -214,7 +215,7 @@ namespace Jackett.Indexers
string[] dateSplit = qRow.Find("td.mainblockcontent").Get(5).InnerHTML.Split(',');
string dateString = dateSplit[1].Substring(0, dateSplit[1].IndexOf('>'));
release.PublishDate = DateTime.Parse(dateString, CultureInfo.InvariantCulture);
releases.Add(release);
}
}

View File

@@ -24,6 +24,8 @@ namespace Jackett.Indexers
public Uri SiteLink { get { return new Uri(BaseUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
static string chromeUserAgent = BrowserUtil.ChromeUserAgent;
@@ -120,75 +122,70 @@ namespace Jackett.Indexers
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
{
CQ dom = results;
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
var request = CreateHttpRequest(new Uri(episodeSearchUrl));
var response = await client.SendAsync(request);
var results = await response.Content.ReadAsStringAsync();
try
var rows = dom["table.torrents > tbody > tr"];
foreach (var row in rows.Skip(1))
{
CQ dom = results;
var release = new ReleaseInfo();
var rows = dom["table.torrents > tbody > tr"];
foreach (var row in rows.Skip(1))
{
var release = new ReleaseInfo();
var qRow = row.Cq();
var qRow = row.Cq();
var qTitleLink = qRow.Find("a.t_title").First();
release.Title = qTitleLink.Text().Trim();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qTitleLink.Attr("href"));
release.Comments = release.Guid;
var qTitleLink = qRow.Find("a.t_title").First();
release.Title = qTitleLink.Text().Trim();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + qTitleLink.Attr("href"));
release.Comments = release.Guid;
DateTime pubDate;
var descString = qRow.Find(".t_ctime").Text();
var dateString = descString.Split('|').Last().Trim();
dateString = dateString.Split(new string[] { " by " }, StringSplitOptions.None)[0];
var dateValue = ParseUtil.CoerceFloat(dateString.Split(' ')[0]);
var dateUnit = dateString.Split(' ')[1];
if (dateUnit.Contains("minute"))
pubDate = DateTime.Now - TimeSpan.FromMinutes(dateValue);
else if (dateUnit.Contains("hour"))
pubDate = DateTime.Now - TimeSpan.FromHours(dateValue);
else if (dateUnit.Contains("day"))
pubDate = DateTime.Now - TimeSpan.FromDays(dateValue);
else if (dateUnit.Contains("week"))
pubDate = DateTime.Now - TimeSpan.FromDays(7 * dateValue);
else if (dateUnit.Contains("month"))
pubDate = DateTime.Now - TimeSpan.FromDays(30 * dateValue);
else if (dateUnit.Contains("year"))
pubDate = DateTime.Now - TimeSpan.FromDays(365 * dateValue);
else
pubDate = DateTime.MinValue;
release.PublishDate = pubDate;
DateTime pubDate;
var descString = qRow.Find(".t_ctime").Text();
var dateString = descString.Split('|').Last().Trim();
dateString = dateString.Split(new string[] { " by " }, StringSplitOptions.None)[0];
var dateValue = ParseUtil.CoerceFloat(dateString.Split(' ')[0]);
var dateUnit = dateString.Split(' ')[1];
if (dateUnit.Contains("minute"))
pubDate = DateTime.Now - TimeSpan.FromMinutes(dateValue);
else if (dateUnit.Contains("hour"))
pubDate = DateTime.Now - TimeSpan.FromHours(dateValue);
else if (dateUnit.Contains("day"))
pubDate = DateTime.Now - TimeSpan.FromDays(dateValue);
else if (dateUnit.Contains("week"))
pubDate = DateTime.Now - TimeSpan.FromDays(7 * dateValue);
else if (dateUnit.Contains("month"))
pubDate = DateTime.Now - TimeSpan.FromDays(30 * dateValue);
else if (dateUnit.Contains("year"))
pubDate = DateTime.Now - TimeSpan.FromDays(365 * dateValue);
else
pubDate = DateTime.MinValue;
release.PublishDate = pubDate;
var qLink = row.ChildElements.ElementAt(3).Cq().Children("a");
release.Link = new Uri(BaseUrl + qLink.Attr("href"));
var qLink = row.ChildElements.ElementAt(3).Cq().Children("a");
release.Link = new Uri(BaseUrl + qLink.Attr("href"));
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
var sizeStr = row.ChildElements.ElementAt(5).Cq().Text().Trim();
var sizeVal = ParseUtil.CoerceFloat(sizeStr.Split(' ')[0]);
var sizeUnit = sizeStr.Split(' ')[1];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".t_seeders").Text().Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".t_leechers").Text().Trim()) + release.Seeders;
releases.Add(release);
}
releases.Add(release);
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -29,6 +29,7 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
public event Action<IndexerInterface, string, Exception> OnResultParsingError;
@@ -145,67 +146,63 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
{
results = await client.GetStringAsync(episodeSearchUrl, retries);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);
string results;
if (Program.IsWindows)
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
{
results = await client.GetStringAsync(episodeSearchUrl, retries);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
try
{
var json = JObject.Parse(results);
foreach (JObject r in json["response"]["results"])
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
{
DateTime pubDate = DateTime.MinValue;
double dateNum;
if (double.TryParse((string)r["groupTime"], out dateNum))
{
pubDate = UnixTimestampToDateTime(dateNum);
pubDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Utc).ToLocalTime();
}
pubDate = UnixTimestampToDateTime(dateNum);
pubDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Utc).ToLocalTime();
}
var groupName = (string)r["groupName"];
var groupName = (string)r["groupName"];
if (r["torrents"] is JArray)
{
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
if (r["torrents"] is JArray)
{
foreach (JObject t in r["torrents"])
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
FillReleaseInfoFromJson(release, t);
releases.Add(release);
}
}
else
{
var release = new ReleaseInfo();
release.PublishDate = pubDate;
release.Title = groupName;
release.Description = groupName;
FillReleaseInfoFromJson(release, r);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -31,6 +31,8 @@ namespace Jackett.Indexers
get { return new Uri("https://rarbg.com"); }
}
public bool RequiresRageIDLookupDisabled { get { return false; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "http://torrentapi.org";
@@ -127,7 +129,7 @@ namespace Jackett.Indexers
if (query.RageID != 0)
searchUrl = string.Format(baseUrl + SearchTVRageUrl, query.RageID, token);
else
searchUrl = string.Format(baseUrl + SearchQueryUrl, query.SearchTerm, token);
searchUrl = string.Format(baseUrl + SearchQueryUrl, query.SanitizedSearchTerm, token);
var request = CreateHttpRequest(searchUrl);
var response = await client.SendAsync(request);

View File

@@ -31,6 +31,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "https://sceneaccess.eu";
const string LoginUrl = BaseUrl + "/login";
const string SearchUrl = BaseUrl + "/{0}?method=1&c{1}=1&search={2}";
@@ -125,64 +127,61 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var searchSection = string.IsNullOrEmpty(query.Episode) ? "archive" : "browse";
var searchCategory = string.IsNullOrEmpty(query.Episode) ? "26" : "27";
var searchUrl = string.Format(SearchUrl, searchSection, searchCategory, searchString);
string results;
if (Program.IsWindows)
{
var searchString = title + " " + query.GetEpisodeSearchString();
var searchSection = string.IsNullOrEmpty(query.Episode) ? "archive" : "browse";
var searchCategory = string.IsNullOrEmpty(query.Episode) ? "26" : "27";
results = await client.GetStringAsync(searchUrl);
}
else
{
var response = await CurlHelper.GetAsync(searchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
var searchUrl = string.Format(SearchUrl, searchSection, searchCategory, searchString);
try
{
CQ dom = results;
var rows = dom["#torrents-table > tbody > tr.tt_row"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
string results;
if (Program.IsWindows)
{
results = await client.GetStringAsync(searchUrl);
}
else
{
var response = await CurlHelper.GetAsync(searchUrl, cookieHeader);
results = Encoding.UTF8.GetString(response.Content);
}
release.MinimumRatio = 1;
release.MinimumSeedTime = 129600;
release.Title = qRow.Find(".ttr_name > a").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".ttr_name > a").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".td_dl > a").Attr("href"));
try
{
CQ dom = results;
var rows = dom["#torrents-table > tbody > tr.tt_row"];
foreach (var row in rows)
var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
if (DateTime.TryParseExact(timeStr, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out time))
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 129600;
release.Title = qRow.Find(".ttr_name > a").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".ttr_name > a").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".td_dl > a").Attr("href"));
var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var timeStr = qRow.Find(".ttr_added").Text();
DateTime time;
if (DateTime.TryParseExact(timeStr, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out time))
{
release.PublishDate = time;
}
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".ttr_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".ttr_leechers").Text()) + release.Seeders;
releases.Add(release);
release.PublishDate = time;
}
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".ttr_seeders").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".ttr_leechers").Text()) + release.Seeders;
releases.Add(release);
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();

View File

@@ -33,6 +33,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://www.scenetime.com";
@@ -118,56 +120,53 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var searchContent = GetSearchFormData(searchString);
var response = await client.PostAsync(SearchUrl, searchContent);
var results = await response.Content.ReadAsStringAsync();
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var searchContent = GetSearchFormData(searchString);
var response = await client.PostAsync(SearchUrl, searchContent);
var results = await response.Content.ReadAsStringAsync();
try
CQ dom = results;
var rows = dom["tr.browse"];
foreach (var row in rows)
{
CQ dom = results;
var rows = dom["tr.browse"];
foreach (var row in rows)
{
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var descCol = row.ChildElements.ElementAt(1);
var qDescCol = descCol.Cq();
var qLink = qDescCol.Find("a");
release.Title = qLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
var torrentId = qLink.Attr("href").Split('=')[1];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
var descCol = row.ChildElements.ElementAt(1);
var qDescCol = descCol.Cq();
var qLink = qDescCol.Find("a");
release.Title = qLink.Text();
release.Description = release.Title;
release.Comments = new Uri(BaseUrl + "/" + qLink.Attr("href"));
release.Guid = release.Comments;
var torrentId = qLink.Attr("href").Split('=')[1];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
var dateStr = descCol.ChildNodes.Last().NodeValue.Trim();
var euDate = DateTime.ParseExact(dateStr, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var localDate = TimeZoneInfo.ConvertTimeToUtc(euDate, TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time")).ToLocalTime();
release.PublishDate = localDate;
var dateStr = descCol.ChildNodes.Last().NodeValue.Trim();
var euDate = DateTime.ParseExact(dateStr, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var localDate = TimeZoneInfo.ConvertTimeToUtc(euDate, TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time")).ToLocalTime();
release.PublishDate = localDate;
var sizeNodes = row.ChildElements.ElementAt(3).ChildNodes;
var sizeVal = sizeNodes.First().NodeValue;
var sizeUnit = sizeNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
var sizeNodes = row.ChildElements.ElementAt(3).ChildNodes;
var sizeVal = sizeNodes.First().NodeValue;
var sizeUnit = sizeNodes.Last().NodeValue;
release.Size = ReleaseInfo.GetBytes(sizeUnit, ParseUtil.CoerceFloat(sizeVal));
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(4).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(5).Cq().Text().Trim()) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(4).Cq().Text().Trim());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(5).Cq().Text().Trim()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
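
SceneTime reports listing times in Central European time, so the hunk above parses the raw string and routes it through TimeZoneInfo before storing a local DateTime. The same conversion as a standalone sketch (the helper name is illustrative; the Windows-style time-zone ID is the one the diff itself uses):

using System;
using System.Globalization;

static class SiteTimeSketch
{
    // Parses "yyyy-MM-dd HH:mm:ss" given in Central European time and
    // returns the equivalent local time, mirroring the SceneTime hunk above.
    public static DateTime ToLocal(string raw)
    {
        var siteTime = DateTime.ParseExact(raw, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
        var tz = TimeZoneInfo.FindSystemTimeZoneById("Central European Standard Time");
        return TimeZoneInfo.ConvertTimeToUtc(siteTime, tz).ToLocalTime();
    }
}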

View File

@@ -33,6 +33,8 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string DefaultUrl = "http://showrss.info";
const string searchAllUrl = DefaultUrl + "/feeds/all.rss";
string BaseUrl;
@@ -117,57 +119,54 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(searchAllUrl);
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(searchAllUrl);
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
using (wc)
{
using (wc)
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
}
ReleaseInfo release;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("title").InnerText;
var test = node.SelectSingleNode("enclosure");
release.Guid = new Uri(test.Attributes["url"].Value);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
release.Description = node.SelectSingleNode("description").InnerText;
release.InfoHash = node.SelectSingleNode("description").InnerText;
release.Size = 0;
release.Seeders = 1;
release.Peers = 1;
release.MagnetUri = new Uri(node.SelectSingleNode("link").InnerText);
releases.Add(release);
}
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
}
catch (Exception ex)
ReleaseInfo release;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
{
OnResultParsingError(this, xml, ex);
throw ex;
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("title").InnerText;
var test = node.SelectSingleNode("enclosure");
release.Guid = new Uri(test.Attributes["url"].Value);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
release.Description = node.SelectSingleNode("description").InnerText;
release.InfoHash = node.SelectSingleNode("description").InnerText;
release.Size = 0;
release.Seeders = 1;
release.Peers = 1;
release.MagnetUri = new Uri(node.SelectSingleNode("link").InnerText);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, xml, ex);
throw ex;
}
return releases.ToArray();
}

View File

@@ -32,6 +32,8 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "https://getstrike.net";
@@ -98,47 +100,47 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { "2015" })
var searchTerm = query.SanitizedSearchTerm ?? "2015";
var searchString = searchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
var results = await client.GetStringAsync(episodeSearchUrl);
try
var jResults = JObject.Parse(results);
foreach (JObject result in (JArray)jResults["torrents"])
{
var jResults = JObject.Parse(results);
foreach (JObject result in (JArray)jResults["torrents"])
{
var release = new ReleaseInfo();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = (string)result["torrent_title"];
release.Description = release.Title;
release.Seeders = (int)result["seeds"];
release.Peers = (int)result["leeches"] + release.Seeders;
release.Size = (long)result["size"];
release.Title = (string)result["torrent_title"];
release.Description = release.Title;
release.Seeders = (int)result["seeds"];
release.Peers = (int)result["leeches"] + release.Seeders;
release.Size = (long)result["size"];
// "Apr 2, 2015", "Apr 12, 2015" (note the spacing)
var dateString = string.Join(" ", ((string)result["upload_date"]).Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
release.PublishDate = DateTime.ParseExact(dateString, "MMM d, yyyy", CultureInfo.InvariantCulture);
// "Apr 2, 2015", "Apr 12, 2015" (note the spacing)
var dateString = string.Join(" ", ((string)result["upload_date"]).Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
release.PublishDate = DateTime.ParseExact(dateString, "MMM d, yyyy", CultureInfo.InvariantCulture);
release.Guid = new Uri((string)result["page"]);
release.Comments = release.Guid;
release.Guid = new Uri((string)result["page"]);
release.Comments = release.Guid;
release.InfoHash = (string)result["torrent_hash"];
release.MagnetUri = new Uri((string)result["magnet_uri"]);
release.Link = new Uri(string.Format("{0}{1}", baseUrl, string.Format(DownloadUrl, release.InfoHash)));
release.InfoHash = (string)result["torrent_hash"];
release.MagnetUri = new Uri((string)result["magnet_uri"]);
release.Link = new Uri(string.Format("{0}{1}", baseUrl, string.Format(DownloadUrl, release.InfoHash)));
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}

View File

@@ -34,6 +34,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "http://www.t411.io";
@@ -136,53 +138,51 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
foreach (var title in query.ShowTitles ?? new string[] { "%20" })
var searchTerm = string.IsNullOrEmpty(query.SanitizedSearchTerm) ? "%20" : query.SanitizedSearchTerm;
var searchString = searchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
message.Headers.TryAddWithoutValidation("Authorization", await GetAuthToken());
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
var jsonResult = JObject.Parse(results);
try
{
var searchString = title + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var message = new HttpRequestMessage();
message.Method = HttpMethod.Get;
message.RequestUri = new Uri(episodeSearchUrl);
message.Headers.TryAddWithoutValidation("Authorization", await GetAuthToken());
var response = await client.SendAsync(message);
var results = await response.Content.ReadAsStringAsync();
var jsonResult = JObject.Parse(results);
try
var items = (JArray)jsonResult["torrents"];
foreach (var item in items)
{
var items = (JArray)jsonResult["torrents"];
foreach (var item in items)
{
var release = new ReleaseInfo();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var torrentId = (string)item["id"];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
release.Title = (string)item["name"];
release.Description = release.Title;
release.Comments = new Uri(string.Format(CommentsUrl, (string)item["rewritename"]));
release.Guid = release.Comments;
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
var torrentId = (string)item["id"];
release.Link = new Uri(string.Format(DownloadUrl, torrentId));
release.Title = (string)item["name"];
release.Description = release.Title;
release.Comments = new Uri(string.Format(CommentsUrl, (string)item["rewritename"]));
release.Guid = release.Comments;
var dateUtc = DateTime.ParseExact((string)item["added"], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(dateUtc, DateTimeKind.Utc).ToLocalTime();
var dateUtc = DateTime.ParseExact((string)item["added"], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
release.PublishDate = DateTime.SpecifyKind(dateUtc, DateTimeKind.Utc).ToLocalTime();
release.Seeders = ParseUtil.CoerceInt((string)item["seeders"]);
release.Peers = ParseUtil.CoerceInt((string)item["leechers"]) + release.Seeders;
release.Seeders = ParseUtil.CoerceInt((string)item["seeders"]);
release.Peers = ParseUtil.CoerceInt((string)item["leechers"]) + release.Seeders;
release.Size = ParseUtil.CoerceLong((string)item["size"]);
release.Size = ParseUtil.CoerceLong((string)item["size"]);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
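
T411 is the one JSON API above whose searches need an auth token: every request carries the result of GetAuthToken() in its Authorization header. A sketch of that request pattern; client and GetAuthToken are the members shown in the diff, the GetAuthorizedAsync wrapper is illustrative:

// Authorized GET against the T411-style API, mirroring the hunk above.
private async Task<string> GetAuthorizedAsync(string url)
{
    var message = new HttpRequestMessage(HttpMethod.Get, new Uri(url));
    // TryAddWithoutValidation is used because the raw token is not in the
    // "scheme value" form that the typed Authorization header expects.
    message.Headers.TryAddWithoutValidation("Authorization", await GetAuthToken());
    var response = await client.SendAsync(message);
    return await response.Content.ReadAsStringAsync();
}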

View File

@@ -26,6 +26,8 @@ namespace Jackett.Indexers
public Uri SiteLink { get { return new Uri(DefaultUrl); } }
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string DefaultUrl = "https://thepiratebay.mn";
@@ -93,101 +95,90 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
List<string> searchUrls = new List<string>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var queryStr = HttpUtility.UrlEncode(searchString);
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, queryStr);
foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
string results;
if (Program.IsWindows)
{
var searchString = title + " " + query.GetEpisodeSearchString();
var queryStr = HttpUtility.UrlEncode(searchString);
var episodeSearchUrl = baseUrl + string.Format(SearchUrl, queryStr);
searchUrls.Add(episodeSearchUrl);
results = await client.GetStringAsync(episodeSearchUrl);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, null, episodeSearchUrl);
results = Encoding.UTF8.GetString(response.Content);
}
foreach (var episodeSearchUrl in searchUrls)
try
{
CQ dom = results;
string results;
if (Program.IsWindows)
var rows = dom["#searchResult > tbody > tr"];
foreach (var row in rows)
{
results = await client.GetStringAsync(episodeSearchUrl);
}
else
{
var response = await CurlHelper.GetAsync(episodeSearchUrl, null, episodeSearchUrl);
results = Encoding.UTF8.GetString(response.Content);
}
var release = new ReleaseInfo();
try
{
CQ dom = results;
CQ qRow = row.Cq();
CQ qLink = qRow.Find(".detName > .detLink").First();
var rows = dom["#searchResult > tbody > tr"];
foreach (var row in rows)
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
var downloadCol = row.ChildElements.ElementAt(1).Cq().Children("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var descString = qRow.Find(".detDesc").Text().Trim();
var descParts = descString.Split(',');
var timeString = descParts[0].Split(' ')[1];
if (timeString.Contains("mins ago"))
{
var release = new ReleaseInfo();
CQ qRow = row.Cq();
CQ qLink = qRow.Find(".detName > .detLink").First();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qLink.Text().Trim();
release.Description = release.Title;
release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
release.Guid = release.Comments;
var downloadCol = row.ChildElements.ElementAt(1).Cq().Children("a");
release.MagnetUri = new Uri(downloadCol.Attr("href"));
release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];
var descString = qRow.Find(".detDesc").Text().Trim();
var descParts = descString.Split(',');
var timeString = descParts[0].Split(' ')[1];
if (timeString.Contains("mins ago"))
{
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(ParseUtil.CoerceInt(timeString.Split(' ')[0])));
}
else if (timeString.Contains("Today"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains("Y-day"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains(':'))
{
var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
else
{
var utc = DateTime.ParseExact(timeString, "MM-dd yyyy", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
releases.Add(release);
release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(ParseUtil.CoerceInt(timeString.Split(' ')[0])));
}
else if (timeString.Contains("Today"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains("Y-day"))
{
release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
}
else if (timeString.Contains(':'))
{
var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
else
{
var utc = DateTime.ParseExact(timeString, "MM-dd yyyy", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
}
var sizeParts = descParts[1].Split(new char[] { ' ', ' ' }, StringSplitOptions.RemoveEmptyEntries);
var sizeVal = ParseUtil.CoerceFloat(sizeParts[1]);
var sizeUnit = sizeParts[2];
release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);
release.Seeders = ParseUtil.CoerceInt(row.ChildElements.ElementAt(2).Cq().Text());
release.Peers = ParseUtil.CoerceInt(row.ChildElements.ElementAt(3).Cq().Text()) + release.Seeders;
releases.Add(release);
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
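The info-hash extraction above (Split(':')[3]) assumes xt=urn:btih: is always the first parameter of the magnet link. A more defensive sketch that tolerates any parameter order, shown for illustration only and not what this indexer ships:

using System.Text.RegularExpressions;

static class MagnetHashExample
{
    // Finds the xt=urn:btih:<hash> parameter anywhere in a magnet URI.
    // Accepts 40-character hex hashes as well as 32-character base32 hashes.
    public static string TryGetInfoHash(string magnetUri)
    {
        var m = Regex.Match(magnetUri, @"xt=urn:btih:([A-Za-z0-9]{32,40})");
        return m.Success ? m.Groups[1].Value : null;
    }
}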

View File

@@ -33,6 +33,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
public bool IsConfigured { get; private set; }
const string BaseUrl = "https://torrentday.eu";
@@ -126,64 +128,61 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ dom = results;
var rows = dom["#torrentTable > tbody > tr.browse"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrentName").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrentName").Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".dlLinksInfo > a").Attr("href"));
var sizeStr = qRow.Find(".sizeInfo").Text().Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
var dateStr = qRow.Find(".ulInfo").Text().Split('|').Last().Trim();
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seedersInfo").Text());
release.Peers = ParseUtil.CoerceInt(qRow.Find(".leechersInfo").Text()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
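The chained Contains checks above turn relative ages such as "2 weeks" into an approximate publish date, with weeks, months and years treated as 7, 30 and 365 days. The same logic as a stand-alone helper, for illustration only (the helper name is not part of this changeset):

using System;

static class RelativeAgeExample
{
    // dateStr is the raw age string (e.g. "3 weeks ago"); value is its leading number.
    public static DateTime ToPublishDate(string dateStr, int value)
    {
        TimeSpan ts = TimeSpan.Zero;
        if (dateStr.Contains("sec")) ts = TimeSpan.FromSeconds(value);
        else if (dateStr.Contains("min")) ts = TimeSpan.FromMinutes(value);
        else if (dateStr.Contains("hour")) ts = TimeSpan.FromHours(value);
        else if (dateStr.Contains("day")) ts = TimeSpan.FromDays(value);
        else if (dateStr.Contains("week")) ts = TimeSpan.FromDays(value * 7);
        else if (dateStr.Contains("month")) ts = TimeSpan.FromDays(value * 30);
        else if (dateStr.Contains("year")) ts = TimeSpan.FromDays(value * 365);
        return DateTime.Now - ts;
    }

    // ToPublishDate("3 weeks ago", 3) is roughly 21 days before now.
}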

View File

@@ -33,6 +33,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "http://www.torrentleech.org";
const string LoginUrl = BaseUrl + "/user/account/login/";
const string SearchUrl = BaseUrl + "/torrents/browse/index/query/{0}/categories/2%2C26%2C27%2C32/orderby/added?";
@@ -109,60 +111,54 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ dom = results;
CQ qRows = dom["#torrenttable > tbody > tr"];
foreach (var row in qRows)
{
var release = new ReleaseInfo();
var qRow = row.Cq();
var debug = qRow.Html();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
CQ qLink = qRow.Find(".title > a").First();
release.Guid = new Uri(BaseUrl + qLink.Attr("href"));
release.Comments = release.Guid;
release.Title = qLink.Text();
release.Description = release.Title;
release.Link = new Uri(BaseUrl + qRow.Find(".quickdownload > a").Attr("href"));
// e.g. "2015-04-25 23:38:12"
var dateString = qRow.Find(".name").First()[0].ChildNodes[4].NodeValue.Replace(" on", "").Trim();
release.PublishDate = DateTime.ParseExact(dateString, "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
var sizeStringParts = qRow.Children().ElementAt(4).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], ParseUtil.CoerceFloat(sizeStringParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Find(".seeders").Text());
release.Peers = release.Seeders + ParseUtil.CoerceInt(qRow.Find(".leechers").Text());
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}

View File

@@ -33,6 +33,8 @@ namespace Jackett.Indexers
get { return new Uri(BaseUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string BaseUrl = "http://torrentshack.me";
const string LoginUrl = BaseUrl + "/login.php";
const string SearchUrl = BaseUrl + "/torrents.php?searchstr={0}&release_type=both&searchtags=&tags_type=0&order_by=s3&order_way=desc&torrent_preset=all&filter_cat%5B600%5D=1&filter_cat%5B620%5D=1&filter_cat%5B700%5D=1&filter_cat%5B981%5D=1&filter_cat%5B980%5D=1";
@@ -111,69 +113,67 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString));
var results = await client.GetStringAsync(episodeSearchUrl);
try
{
CQ dom = results;
var rows = dom["#torrent_table > tbody > tr.torrent"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
release.Title = qRow.Find(".torrent_name_link").Text();
release.Description = release.Title;
release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".torrent_name_link").Parent().Attr("href"));
release.Comments = release.Guid;
release.Link = new Uri(BaseUrl + "/" + qRow.Find(".torrent_handle_links > a").First().Attr("href"));
var dateStr = qRow.Find(".time").Text().Trim();
if (dateStr.ToLower().Contains("just now"))
release.PublishDate = DateTime.Now;
else
{
var dateParts = dateStr.Split(' ');
var dateValue = ParseUtil.CoerceInt(dateParts[0]);
TimeSpan ts = TimeSpan.Zero;
if (dateStr.Contains("sec"))
ts = TimeSpan.FromSeconds(dateValue);
else if (dateStr.Contains("min"))
ts = TimeSpan.FromMinutes(dateValue);
else if (dateStr.Contains("hour"))
ts = TimeSpan.FromHours(dateValue);
else if (dateStr.Contains("day"))
ts = TimeSpan.FromDays(dateValue);
else if (dateStr.Contains("week"))
ts = TimeSpan.FromDays(dateValue * 7);
else if (dateStr.Contains("month"))
ts = TimeSpan.FromDays(dateValue * 30);
else if (dateStr.Contains("year"))
ts = TimeSpan.FromDays(dateValue * 365);
release.PublishDate = DateTime.Now - ts;
}
var sizeStr = qRow.Find(".size")[0].ChildNodes[0].NodeValue.Trim();
var sizeParts = sizeStr.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeParts[1], ParseUtil.CoerceFloat(sizeParts[0]));
release.Seeders = ParseUtil.CoerceInt(qRow.Children().ElementAt(6).InnerText.Trim());
release.Peers = ParseUtil.CoerceInt(qRow.Children().ElementAt(7).InnerText.Trim()) + release.Seeders;
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, results, ex);
throw ex;
}
return releases.ToArray();
}
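ReleaseInfo.GetBytes itself is not included in this diff; the call sites above pass a unit string ("MB", "GB", ...) and a numeric value. A hypothetical sketch of what such a unit-to-bytes conversion typically does, purely for illustration and not the project's actual helper:

using System;

static class SizeExample
{
    // Converts values like (unit: "GB", value: 1.4f) into a byte count.
    public static long GetBytes(string unit, float value)
    {
        switch (unit.ToUpperInvariant().Trim())
        {
            case "KB": return (long)(value * 1024L);
            case "MB": return (long)(value * 1024L * 1024L);
            case "GB": return (long)(value * 1024L * 1024L * 1024L);
            case "TB": return (long)(value * 1024L * 1024L * 1024L * 1024L);
            default: return (long)value;
        }
    }
}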

View File

@@ -32,6 +32,8 @@ namespace Jackett.Indexers
get { return new Uri(DefaultUrl); }
}
public bool RequiresRageIDLookupDisabled { get { return true; } }
const string DefaultUrl = "https://torrentz.eu";
const string SearchUrl = DefaultUrl + "/feed_verifiedP?f={0}";
string BaseUrl;
@@ -104,57 +106,54 @@ namespace Jackett.Indexers
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var searchString = query.SanitizedSearchTerm + " " + query.GetEpisodeSearchString();
var episodeSearchUrl = string.Format(SearchUrl, HttpUtility.UrlEncode(searchString.Trim()));
XmlDocument xmlDoc = new XmlDocument();
string xml = string.Empty;
WebClient wc = getWebClient();
try
{
using (wc)
{
xml = wc.DownloadString(episodeSearchUrl);
xmlDoc.LoadXml(xml);
}
ReleaseInfo release;
TorrentzHelper td;
string serie_title;
foreach (XmlNode node in xmlDoc.GetElementsByTagName("item"))
{
release = new ReleaseInfo();
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800;
serie_title = node.SelectSingleNode("title").InnerText;
release.Title = serie_title;
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
release.Category = node.SelectSingleNode("category").InnerText;
release.Guid = new Uri(node.SelectSingleNode("guid").InnerText);
release.PublishDate = DateTime.Parse(node.SelectSingleNode("pubDate").InnerText, CultureInfo.InvariantCulture);
td = new TorrentzHelper(node.SelectSingleNode("description").InnerText);
release.Description = td.Description;
release.InfoHash = td.hash;
release.Size = td.Size;
release.Seeders = td.Seeders;
release.Peers = td.Peers + release.Seeders;
release.MagnetUri = TorrentzHelper.createMagnetLink(td.hash, serie_title);
releases.Add(release);
}
}
catch (Exception ex)
{
OnResultParsingError(this, xml, ex);
throw ex;
}
return releases.ToArray();
}
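TorrentzHelper.createMagnetLink is likewise not shown in this diff. A conventional magnet URI built from an info hash and a display name would look roughly like the sketch below; this is an assumption about the helper, not its actual code:

using System;
using System.Web;

static class MagnetLinkExample
{
    // Builds a standard magnet link: magnet:?xt=urn:btih:<hash>&dn=<encoded name>
    public static Uri CreateMagnetLink(string infoHash, string displayName)
    {
        return new Uri(string.Format("magnet:?xt=urn:btih:{0}&dn={1}",
            infoHash, HttpUtility.UrlEncode(displayName)));
    }
}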

View File

@@ -133,9 +133,7 @@
<Compile Include="ResultPage.cs" />
<Compile Include="Server.cs" />
<Compile Include="ServerUtil.cs" />
<Compile Include="SonarApi.cs" />
<Compile Include="TorznabQuery.cs" />
<Compile Include="TVRage.cs" />
<Compile Include="WebApi.cs" />
<Compile Include="CurlHelper.cs" />
<Compile Include="Indexers\AlphaRatio.cs" />
@@ -242,7 +240,6 @@
<Content Include="WebContent\jquery-2.1.3.min.js">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Include="WebContent\logos\freshon.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
@@ -327,4 +324,4 @@
</Properties>
</MonoDevelop>
</ProjectExtensions>
</Project>

View File

@@ -23,7 +23,6 @@ namespace Jackett
HttpListener listener;
IndexerManager indexerManager;
WebApi webApi;
SonarrApi sonarrApi;
public Server()
@@ -34,11 +33,8 @@ namespace Jackett
ReadServerSettingsFile();
LoadApiKey();
indexerManager = new IndexerManager();
sonarrApi = new SonarrApi();
webApi = new WebApi(indexerManager, sonarrApi);
webApi = new WebApi(indexerManager);
}
void LoadApiKey()
@@ -206,10 +202,10 @@ namespace Jackett
var torznabQuery = TorznabQuery.FromHttpQuery(query);
if (torznabQuery.RageID != 0)
torznabQuery.ShowTitles = await sonarrApi.GetShowTitle(torznabQuery.RageID);
else if (!string.IsNullOrEmpty(torznabQuery.SearchTerm))
torznabQuery.ShowTitles = new string[] { torznabQuery.SearchTerm };
if (torznabQuery.RageIDLookupEnabled && indexer.RequiresRageIDLookupDisabled)
{
throw new ArgumentException("This indexer requires RageID lookup disabled");
}
var releases = await indexer.PerformQuery(torznabQuery);
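The new guard rejects queries where the caller still expects Jackett to resolve a RageID to show titles. A sketch of how a Torznab request with RageID lookup disabled reaches this code, using the rid_enabled flag parsed by TorznabQuery.FromHttpQuery later in this diff; the wrapper class and field values are illustrative only:

using System.Collections.Specialized;
using Jackett;

static class RageIdGuardExample
{
    public static TorznabQuery BuildQuery()
    {
        var query = new NameValueCollection();
        query.Add("t", "tvsearch");
        query.Add("q", "Some Show");
        query.Add("season", "5");
        query.Add("ep", "1");
        query.Add("rid_enabled", "0"); // the caller has RageID lookup turned off

        // RageIDLookupEnabled comes out false, so the ArgumentException above is not thrown.
        return TorznabQuery.FromHttpQuery(query);
    }
}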

View File

@@ -1,169 +0,0 @@
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
namespace Jackett
{
public class SonarrApi
{
public class ConfigurationSonarr : ConfigurationData
{
public StringItem Host { get; private set; }
public StringItem Port { get; private set; }
public StringItem ApiKey { get; private set; }
DisplayItem ApiInfo;
public ConfigurationSonarr()
{
Host = new StringItem { Name = "Host", Value = "http://localhost" };
Port = new StringItem { Name = "Port", Value = "8989" };
ApiKey = new StringItem { Name = "API Key" };
ApiInfo = new DisplayItem("API Key can be found in Sonarr > Settings > General > Security") { Name = "API Info" };
}
public override Item[] GetItems()
{
return new Item[] { Host, Port, ApiKey, ApiInfo };
}
}
static string SonarrConfigFile = Path.Combine(Program.AppConfigDirectory, "sonarr_api.json");
string Host;
int Port;
string ApiKey;
CookieContainer cookies;
HttpClientHandler handler;
HttpClient client;
ConcurrentDictionary<int, string[]> IdNameMappings;
public SonarrApi()
{
LoadSettings();
cookies = new CookieContainer();
handler = new HttpClientHandler
{
CookieContainer = cookies,
AllowAutoRedirect = true,
UseCookies = true,
};
client = new HttpClient(handler);
IdNameMappings = new ConcurrentDictionary<int, string[]>();
}
async Task ReloadNameMappings(string host, int port, string apiKey)
{
Uri hostUri = new Uri(host);
var queryUrl = string.Format("http://{0}:{1}/api/series?apikey={2}", hostUri.Host, port, apiKey);
var response = await client.GetStringAsync(queryUrl);
var json = JArray.Parse(response);
IdNameMappings.Clear();
foreach (var item in json)
{
var titles = new List<string>();
titles.Add(SanitizeTitle((string)item["title"]));
foreach (var t in item["alternateTitles"])
{
titles.Add(SanitizeTitle((string)t["title"]));
}
IdNameMappings.TryAdd((int)item["tvRageId"], titles.ToArray());
}
}
string SanitizeTitle(string title)
{
char[] arr = title.ToCharArray();
arr = Array.FindAll<char>(arr, c => (char.IsLetterOrDigit(c)
|| char.IsWhiteSpace(c)
|| c == '-'
|| c == '.'
));
title = new string(arr);
return title;
}
void LoadSettings()
{
try
{
if (File.Exists(SonarrConfigFile))
{
var json = JObject.Parse(File.ReadAllText(SonarrConfigFile));
Host = (string)json["host"];
Port = (int)json["port"];
ApiKey = (string)json["api_key"];
}
}
catch (Exception) { }
}
void SaveSettings()
{
JObject json = new JObject();
json["host"] = Host;
json["port"] = Port;
json["api_key"] = ApiKey;
File.WriteAllText(SonarrConfigFile, json.ToString());
}
public ConfigurationSonarr GetConfiguration()
{
var config = new ConfigurationSonarr();
if (ApiKey != null)
{
config.Host.Value = Host;
config.Port.Value = Port.ToString();
config.ApiKey.Value = ApiKey;
}
return config;
}
public async Task ApplyConfiguration(JToken configJson)
{
var config = new ConfigurationSonarr();
config.LoadValuesFromJson(configJson);
await ReloadNameMappings(config.Host.Value, ParseUtil.CoerceInt(config.Port.Value), config.ApiKey.Value);
Host = "http://" + new Uri(config.Host.Value).Host;
Port = ParseUtil.CoerceInt(config.Port.Value);
ApiKey = config.ApiKey.Value;
SaveSettings();
}
public async Task TestConnection()
{
await ReloadNameMappings(Host, Port, ApiKey);
}
public async Task<string[]> GetShowTitle(int rid)
{
if (rid == 0)
return null;
int tries = 0;
while (tries < 2)
{
string[] titles;
if (IdNameMappings.TryGetValue(rid, out titles))
return titles;
await ReloadNameMappings(Host, Port, ApiKey);
tries++;
}
return null;
}
}
}

View File

@@ -1,12 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Jackett
{
class TVRage
{
}
}

View File

@@ -17,10 +17,12 @@ namespace Jackett
public int Limit { get; private set; }
public int Offset { get; private set; }
public int RageID { get; private set; }
public bool RageIDLookupEnabled { get; private set; }
public int Season { get; private set; }
public string Episode { get; private set; }
public string[] ShowTitles { get; set; }
public string SearchTerm { get; set; }
public string SearchTerm { get; private set; }
public string SanitizedSearchTerm { get; private set; }
public string GetEpisodeSearchString()
{
@@ -39,13 +41,39 @@ namespace Jackett
return episodeString;
}
static string SanitizeSearchTerm(string title)
{
char[] arr = title.ToCharArray();
arr = Array.FindAll<char>(arr, c => (char.IsLetterOrDigit(c)
|| char.IsWhiteSpace(c)
|| c == '-'
|| c == '.'
));
title = new string(arr);
return title;
}
public static TorznabQuery FromHttpQuery(NameValueCollection query)
{
//{t=tvsearch&cat=5030%2c5040&extended=1&apikey=test&offset=0&limit=100&rid=24493&season=5&ep=1}
var q = new TorznabQuery();
q.QueryType = query["t"];
q.SearchTerm = query["q"];
if (query["q"] == null)
{
q.SearchTerm = string.Empty;
q.SanitizedSearchTerm = string.Empty;
}
else
{
q.SearchTerm = query["q"];
q.SanitizedSearchTerm = SanitizeSearchTerm(q.SearchTerm);
}
q.RageIDLookupEnabled = query["rid_enabled"] != "0";
if (query["cat"] != null)
{
q.Categories = query["cat"].Split(',');
@@ -65,11 +93,17 @@ namespace Jackett
q.Offset = ParseUtil.CoerceInt(query["offset"]);
}
int temp;
if (int.TryParse(query["rid"], out temp))
q.RageID = temp;
if (int.TryParse(query["season"], out temp))
q.Season = temp;
int rageId;
if (int.TryParse(query["rid"], out rageId))
{
q.RageID = rageId;
}
int season;
if (int.TryParse(query["season"], out season))
{
q.Season = season;
}
q.Episode = query["ep"];
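
SanitizeSearchTerm keeps only letters, digits, whitespace, '-' and '.', so punctuation that trackers rarely index (apostrophes, exclamation marks and so on) is stripped from the q= value before it is handed to an indexer. A stand-alone sketch of the same filter with an example input; the wrapper class is illustrative only:

using System;

static class SanitizeExample
{
    public static string Sanitize(string title)
    {
        // Keep letters, digits, whitespace, '-' and '.'; drop everything else.
        char[] arr = Array.FindAll(title.ToCharArray(),
            c => char.IsLetterOrDigit(c) || char.IsWhiteSpace(c) || c == '-' || c == '.');
        return new string(arr);
    }

    // Sanitize("Marvel's Agents of S.H.I.E.L.D.!") returns "Marvels Agents of S.H.I.E.L.D."
}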

View File

@@ -27,7 +27,6 @@ namespace Jackett
DeleteIndexer,
GetSonarrConfig,
ApplySonarrConfig,
TestSonarr,
GetJackettConfig,
ApplyJackettConfig,
JackettRestart,
@@ -41,19 +40,16 @@ namespace Jackett
{ "delete_indexer", WebApiMethod.DeleteIndexer },
{ "get_sonarr_config", WebApiMethod.GetSonarrConfig },
{ "apply_sonarr_config", WebApiMethod.ApplySonarrConfig },
{ "test_sonarr", WebApiMethod.TestSonarr },
{ "get_jackett_config",WebApiMethod.GetJackettConfig},
{ "apply_jackett_config",WebApiMethod.ApplyJackettConfig},
{ "jackett_restart", WebApiMethod.JackettRestart },
};
IndexerManager indexerManager;
SonarrApi sonarrApi;
public WebApi(IndexerManager indexerManager, SonarrApi sonarrApi)
public WebApi(IndexerManager indexerManager)
{
this.indexerManager = indexerManager;
this.sonarrApi = sonarrApi;
}
public async Task<bool> HandleRequest(HttpListenerContext context)
@@ -125,15 +121,6 @@ namespace Jackett
case WebApiMethod.DeleteIndexer:
handlerTask = HandleDeleteIndexer;
break;
case WebApiMethod.GetSonarrConfig:
handlerTask = HandleGetSonarrConfig;
break;
case WebApiMethod.ApplySonarrConfig:
handlerTask = HandleApplySonarrConfig;
break;
case WebApiMethod.TestSonarr:
handlerTask = HandleTestSonarr;
break;
case WebApiMethod.ApplyJackettConfig:
handlerTask = HandleApplyJackettConfig;
break;
@@ -164,55 +151,6 @@ namespace Jackett
}
}
async Task<JToken> HandleTestSonarr(HttpListenerContext context)
{
JToken jsonReply = new JObject();
try
{
await sonarrApi.TestConnection();
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return jsonReply;
}
async Task<JToken> HandleApplySonarrConfig(HttpListenerContext context)
{
JToken jsonReply = new JObject();
try
{
var postData = await ReadPostDataJson(context.Request.InputStream);
await sonarrApi.ApplyConfiguration(postData);
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return jsonReply;
}
Task<JToken> HandleGetSonarrConfig(HttpListenerContext context)
{
JObject jsonReply = new JObject();
try
{
jsonReply["config"] = sonarrApi.GetConfiguration().ToJson();
jsonReply["result"] = "success";
}
catch (Exception ex)
{
jsonReply["result"] = "error";
jsonReply["error"] = ex.Message;
}
return Task.FromResult<JToken>(jsonReply);
}
Task<JToken> HandleInvalidApiMethod(HttpListenerContext context)
{
JToken jsonReply = new JObject();
@@ -276,7 +214,7 @@ namespace Jackett
jsonReply["api_key"] = ApiKey.CurrentKey;
jsonReply["app_version"] = Assembly.GetExecutingAssembly().GetName().Version.ToString();
JArray items = new JArray();
foreach (var i in indexerManager.Indexers.OrderBy(_=>_.Key))
foreach (var i in indexerManager.Indexers.OrderBy(_ => _.Key))
{
var indexer = i.Value;
var item = new JObject();

View File

@@ -2,7 +2,6 @@
reloadIndexers();
loadJackettSettings();
loadSonarrInfo();
function loadJackettSettings() {
getJackettConfig(function (data) {
@@ -44,86 +43,6 @@ function getJackettConfig(callback) {
});
}
function loadSonarrInfo() {
getSonarrConfig(function (data) {
$("#sonarr-host").val("");
var host, port, apiKey;
for (var i = 0; i < data.config.length; i++) {
if (data.config[i].id == "host")
host = data.config[i].value;
if (data.config[i].id == "port")
port = data.config[i].value;
if (data.config[i].id == "apikey")
apiKey = data.config[i].value;
}
if (!apiKey)
$("#sonarr-warning").show();
else {
$("#sonarr-warning").hide();
$("#sonarr-host").val(host + ":" + port);
}
});
}
function getSonarrConfig(callback) {
var jqxhr = $.get("get_sonarr_config", function (data) {
callback(data);
}).fail(function () {
doNotify("Error loading Sonarr API configuration, request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
});
}
$("#sonarr-test").click(function () {
var jqxhr = $.get("get_indexers", function (data) {
if (data.result == "error")
doNotify("Test failed for Sonarr API\n" + data.error, "danger", "glyphicon glyphicon-alert");
else
doNotify("Test successful for Sonarr API", "success", "glyphicon glyphicon-ok");
}).fail(function () {
doNotify("Error testing Sonarr, request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
});
});
$("#sonarr-settings").click(function () {
getSonarrConfig(function (data) {
var config = data.config;
var configForm = newConfigModal("Sonarr API", config);
var $goButton = configForm.find(".setup-indexer-go");
$goButton.click(function () {
var data = getConfigModalJson(configForm);
var originalBtnText = $goButton.html();
$goButton.prop('disabled', true);
$goButton.html($('#templates > .spinner')[0].outerHTML);
var jqxhr = $.post("apply_sonarr_config", JSON.stringify(data), function (data) {
if (data.result == "error") {
if (data.config) {
populateSetupForm(data.indexer, data.name, data.config);
}
doNotify("Configuration failed: " + data.error, "danger", "glyphicon glyphicon-alert");
}
else {
configForm.modal("hide");
loadSonarrInfo();
doNotify("Successfully configured Sonarr API", "success", "glyphicon glyphicon-ok");
}
}).fail(function () {
doNotify("Request to Jackett server failed", "danger", "glyphicon glyphicon-alert");
}).always(function () {
$goButton.html(originalBtnText);
$goButton.prop('disabled', false);
});
});
configForm.modal("show");
});
});
function reloadIndexers() {
$('#indexers').hide();
$('#indexers > .indexer').remove();

View File

@@ -25,29 +25,18 @@
<hr />
<div class="input-area">
<span class="input-header">Sonarr API Host: </span>
<input id="sonarr-host" class="form-control input-right" type="text" readonly />
<button id="sonarr-settings" class="btn btn-primary btn-sm">
Settings <span class="glyphicon glyphicon-wrench" aria-hidden="true"></span>
</button>
<button id="sonarr-test" class="btn btn-warning btn-sm">
Test <span class="glyphicon glyphicon-screenshot" aria-hidden="true"></span>
</button>
<p id="sonarr-warning" class="alert alert-danger" role="alert">
<span class="glyphicon glyphicon-exclamation-sign"></span>
Sonarr API must be configured
</p>
</div>
<hr />
<div class="input-area">
<p>
To add a Jackett indexer in Sonarr go to <b>Settings > Indexers > Add > Torznab > Custom</b>.
</p>
<h4>Adding a Jackett indexer in Sonarr</h4>
<ol>
<li>In Sonarr go to <b>Settings > Indexers > Add > Torznab > Custom</b></li>
<li>For <b>URL</b> enter the <b>Torznab Host</b> of one of the indexers below</li>
<li>For <b>API key</b> enter the key below</li>
<li>Turn off <b>Enable RageID Lookup</b></li>
</ol>
<span class="input-header">Jackett API Key: </span>
<input id="api-key-input" class="form-control input-right" type="text" value="" placeholder="API Key" readonly="">
<p>Use this key when adding indexers to Sonarr. This key works for all indexers.</p>
</div>
<hr />
<div class="input-area">
<span class="input-header">Jackett port: </span>
<input id="jackett-port" class="form-control input-right" type="text" value="" placeholder="9117">
<button id="change-jackett-port" class="btn btn-primary btn-sm">