Mirror of https://github.com/Jackett/Jackett, synced 2025-02-23 14:51:01 +00:00
* add offset support to ncore indexer * small correction to how the .hun tag is added to torrents * fix DivideByZeroException at torrent_per_page
parent 8dc832d64a
commit d7af20d4f6
1 changed file with 22 additions and 6 deletions
@@ -117,7 +117,7 @@ namespace Jackett.Common.Indexers
             return IndexerConfigurationStatus.RequiresTesting;
         }

-        List<ReleaseInfo> parseTorrents(WebClientStringResult results, String seasonep, TorznabQuery query, int already_founded, int limit)
+        List<ReleaseInfo> parseTorrents(WebClientStringResult results, String seasonep, TorznabQuery query, int already_founded, int limit, int previously_parsed_on_page)
         {
             var releases = new List<ReleaseInfo>();
             try
@@ -128,7 +128,7 @@ namespace Jackett.Common.Indexers
                 var rows = dom[".box_torrent_all"].Find(".box_torrent");

                 // Check torrents only till we reach the query Limit
-                for(int i=0; (i<rows.Length && ((already_founded + releases.Count) < limit )); i++)
+                for(int i= previously_parsed_on_page; (i<rows.Length && ((already_founded + releases.Count) < limit )); i++)
                 {
                     try
                     {
@@ -172,7 +172,7 @@ namespace Jackett.Common.Indexers
                         release.Category = MapTrackerCatToNewznab(cat);

                         /* if the release name not contains the language we add it because it is know from category */
-                        if (cat.Contains("hun") && !release.Title.Contains("hun"))
+                        if (cat.Contains("hun") && !release.Title.ToLower().Contains("hun"))
                             release.Title += ".hun";

                         if (seasonep == null)
@@ -272,6 +272,15 @@ namespace Jackett.Common.Indexers
             CQ dom = results.Content;
             int numVal = 0;

+            // find number of torrents / page
+            int torrent_per_page = dom[".box_torrent_all"].Find(".box_torrent").Length;
+            if (torrent_per_page==0)
+                return releases;
+            int start_page = (query.Offset / torrent_per_page)+1;
+            int previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
+            if (previously_parsed_on_page < 0)
+                previously_parsed_on_page = query.Offset;
+
             // find pagelinks in the bottom
             var pagelinks = dom["div[id=pager_bottom]"].Find("a");
             if (pagelinks.Length > 0)
@@ -293,15 +302,22 @@ namespace Jackett.Common.Indexers
             if (limit == 0)
                 limit = 100;

-            releases = parseTorrents(results, seasonep, query, releases.Count, limit);
+            if (start_page == 1)
+            {
+                releases = parseTorrents(results, seasonep, query, releases.Count, limit, previously_parsed_on_page);
+                previously_parsed_on_page = 0;
+                start_page++;
+            }


             // Check all the pages for the torrents.
             // The starting index is 2. (the first one is the original where we parse out the pages.)
-            for (int i=2; (i<= numVal && releases.Count < limit); i++ )
+            for (int i= start_page; (i<= numVal && releases.Count < limit); i++ )
             {
                 pairs.Add(new KeyValuePair<string, string>("oldal", i.ToString()));
                 results = await PostDataWithCookiesAndRetry(SearchUrl, pairs);
-                releases.AddRange(parseTorrents(results, seasonep, query, releases.Count, limit));
+                releases.AddRange(parseTorrents(results, seasonep, query, releases.Count, limit, previously_parsed_on_page));
+                previously_parsed_on_page = 0;
                 pairs.Remove(new KeyValuePair<string, string>("oldal", i.ToString()));
             }
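The offset support added above rests on a small piece of integer arithmetic: given the query offset and the number of torrents the site lists per page, work out which page to request first and how many rows on that first page earlier requests have already returned, and bail out early when the page holds no rows (the DivideByZeroException named in the commit message). The sketch below is a minimal C# illustration of that mapping, not the indexer's actual code: the names offset, pageSize, startPage and skipOnPage are placeholders, and it uses plain division and modulo rather than the exact expressions that start_page and previously_parsed_on_page use in the diff.

    // Minimal sketch, not the indexer's actual code; all names here are placeholders.
    int offset = 60;     // Torznab query offset: rows already delivered to the caller
    int pageSize = 25;   // torrents the site shows per result page

    int startPage;
    int skipOnPage;
    if (pageSize <= 0)
    {
        // Empty result page: stop instead of dividing by zero,
        // which is the DivideByZeroException the commit message refers to.
        startPage = 1;
        skipOnPage = 0;
    }
    else
    {
        startPage = (offset / pageSize) + 1;  // 1-based page that contains row `offset`
        skipOnPage = offset % pageSize;       // rows on that page already returned earlier
    }
    // With offset 60 and 25 torrents per page this gives startPage == 3 and skipOnPage == 10.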