Mirror of https://github.com/Jackett/Jackett, synced 2025-01-04 06:22:45 +00:00
TvStore: refactor and update (#7978)
This commit is contained in:
parent 4dce8f61d9
commit e1c15f82d2
1 changed file with 198 additions and 255 deletions
@@ -1,12 +1,11 @@
 using System;
 using System.Collections.Generic;
+using System.Collections.Specialized;
 using System.Globalization;
 using System.Linq;
-using System.Net;
 using System.Text;
 using System.Text.RegularExpressions;
 using System.Threading.Tasks;
-using AngleSharp.Html.Parser;
 using Jackett.Common.Models;
 using Jackett.Common.Models.IndexerConfig.Bespoke;
 using Jackett.Common.Services.Interfaces;
@@ -19,75 +18,77 @@ namespace Jackett.Common.Indexers
 {
     public class TVstore : BaseWebIndexer
     {
+        private readonly Dictionary<int, long> _imdbLookup = new Dictionary<int, long>(); // _imdbLookup[internalId] = imdbId
+
+        private readonly Dictionary<long, int>
+            _internalLookup = new Dictionary<long, int>(); // _internalLookup[imdbId] = internalId
+
+        private readonly Regex _seriesInfoMatch = new Regex(
+            @"catl\[\d+\]=(?<seriesID>\d+).*catIM\[\k<seriesID>]='(?<ImdbId>\d+)'", RegexOptions.Compiled);
+
+        private readonly Regex _seriesInfoSearchRegex = new Regex(
+            @"S(?<season>\d{1,3})(?:E(?<episode>\d{1,3}))?$", RegexOptions.IgnoreCase);
+
+        public TVstore(IIndexerConfigurationService configService, WebClient wc, Logger l, IProtectionService ps) :
+            base("TV Store",
+                 description: "TV Store is a HUNGARIAN Private Torrent Tracker for TV",
+                 link: "https://tvstore.me/",
+                 caps: new TorznabCapabilities
+                 {
+                     SupportsImdbTVSearch = true,
+                     SupportsImdbMovieSearch = true // Needed for IMDb searches to work see #7977
+                 },
+                 configService: configService,
+                 client: wc,
+                 logger: l,
+                 p: ps,
+                 configData: new ConfigurationDataTVstore())
+        {
+            Encoding = Encoding.UTF8;
+            Language = "hu-hu";
+            Type = "private";
+            AddCategoryMapping(1, TorznabCatType.TV);
+            AddCategoryMapping(2, TorznabCatType.TVHD);
+            AddCategoryMapping(3, TorznabCatType.TVSD);
+        }
+
         private string LoginUrl => SiteLink + "takelogin.php";
         private string LoginPageUrl => SiteLink + "login.php?returnto=%2F";
         private string SearchUrl => SiteLink + "torrent/br_process.php";
         private string DownloadUrl => SiteLink + "torrent/download.php";
         private string BrowseUrl => SiteLink + "torrent/browse.php";
-        private readonly List<SeriesDetail> series = new List<SeriesDetail>();
-        private readonly Regex _searchStringRegex = new Regex(@"(.+?)S0?(\d+)(E0?(\d+))?$", RegexOptions.IgnoreCase);
-
-        private new ConfigurationDataTVstore configData
-        {
-            get => (ConfigurationDataTVstore)base.configData;
-            set => base.configData = value;
-        }
-
-        public TVstore(IIndexerConfigurationService configService, Utils.Clients.WebClient wc, Logger l, IProtectionService ps)
-            : base(name: "TVstore",
-                description: "TV Store is a HUNGARIAN Private Torrent Tracker for TV",
-                link: "https://tvstore.me/",
-                caps: new TorznabCapabilities(),
-                configService: configService,
-                client: wc,
-                logger: l,
-                p: ps,
-                configData: new ConfigurationDataTVstore())
-        {
-            Encoding = Encoding.UTF8;
-            Language = "hu-hu";
-            Type = "private";
-
-            TorznabCaps.SupportsImdbTVSearch = true;
-            AddCategoryMapping(1, TorznabCatType.TV);
-            AddCategoryMapping(2, TorznabCatType.TVHD);
-            AddCategoryMapping(3, TorznabCatType.TVSD);
-
-        }
+        private new ConfigurationDataTVstore configData => (ConfigurationDataTVstore)base.configData;
 
         public override async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
         {
             LoadValuesFromJson(configJson);
 
             var loginPage = await RequestStringWithCookies(LoginPageUrl, string.Empty);
-            var pairs = new Dictionary<string, string> {
-                { "username", configData.Username.Value },
-                { "password", configData.Password.Value },
-                { "back", "%2F" },
-                { "logout", "1"}
+            var pairs = new Dictionary<string, string>
+            {
+                {"username", configData.Username.Value},
+                {"password", configData.Password.Value},
+                {"back", "%2F"},
+                {"logout", "1"}
             };
 
             var result = await RequestLoginAndFollowRedirect(LoginUrl, pairs, loginPage.Cookies, true, referer: SiteLink);
-            await ConfigureIfOK(result.Cookies, result.Content?.Contains("Főoldal") == true, () => throw new ExceptionWithConfigData(
-                $"Error while trying to login with: Username: {configData.Username.Value} Password: {configData.Password.Value}", configData));
+            await ConfigureIfOK(
+                result.Cookies, result.Content?.Contains("Főoldal") == true,
+                () => throw new ExceptionWithConfigData("Error while trying to login.", configData));
             return IndexerConfigurationStatus.RequiresTesting;
         }
 
         /// <summary>
         /// Calculate the Upload Factor for the torrents
         /// </summary>
         /// <returns>The calculated factor</returns>
         /// <param name="dateTime">Date time.</param>
-        /// <param name="type">Type of the torrent (SeasonPack/SingleEpisode).</param>
-        public double UploadFactorCalculator(DateTime dateTime, string type)
+        /// <param name="isSeasonPack">Determine if torrent type is season pack or single episode</param>
+        private static double UploadFactorCalculator(DateTime dateTime, bool isSeasonPack)
        {
-            var today = DateTime.Now;
-            var dd = (today - dateTime).Days;
+            var dd = (DateTime.Now - dateTime).Days;
 
             /* In case of season Packs */
-            if (type.Equals("season"))
+            if (isSeasonPack)
             {
                 if (dd >= 90)
                     return 4;
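The new `_seriesInfoMatch` field above leans on a named-group backreference (`\k<seriesID>`) so a single pattern pairs each internal series id from the browse page's inline script with the IMDB number stored next to it. A minimal standalone sketch of that pairing, using a hypothetical script snippet (the real page content is far longer):

    using System;
    using System.Text.RegularExpressions;

    internal static class SeriesMapDemo
    {
        // Same pattern the indexer compiles; \k<seriesID> forces the catIM[...] entry
        // to belong to the catl[...] entry matched just before it.
        private static readonly Regex SeriesInfoMatch = new Regex(
            @"catl\[\d+\]=(?<seriesID>\d+).*catIM\[\k<seriesID>]='(?<ImdbId>\d+)'", RegexOptions.Compiled);

        private static void Main()
        {
            // Hypothetical text shaped like the inline JavaScript on the browse page.
            const string script = "catl[0]=1727;catIM[1727]='2364582';catl[1]=2051;catIM[2051]='0944947';";

            foreach (Match match in SeriesInfoMatch.Matches(script))
                Console.WriteLine($"internal {match.Groups["seriesID"].Value} -> tt{match.Groups["ImdbId"].Value}");
        }
    }

PopulateImdbMapAsync (later in this diff) stores each pair in both `_imdbLookup` and `_internalLookup`, so torrent rows and IMDB queries each resolve with a single dictionary lookup.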
@@ -103,95 +104,99 @@ namespace Jackett.Common.Indexers
                 if (dd >= 30)
                     return 1.5;
             }
 
             return 1;
         }
 
         /// <summary>
         /// Parses the torrents from the content
         /// </summary>
         /// <returns>The parsed torrents.</returns>
         /// <param name="results">The result of the query</param>
-        /// <param name="query">Query.</param>
-        /// <param name="already_found">Number of the already found torrents.(used for limit)</param>
+        /// <param name="alreadyFound">Number of the already found torrents.(used for limit)</param>
         /// <param name="limit">The limit to the number of torrents to download </param>
-        private async Task<List<ReleaseInfo>> ParseTorrents(WebClientStringResult results, TorznabQuery query, int already_found, int limit, int previously_parsed_on_page)
+        /// <param name="previouslyParsedOnPage">Current position in parsed results</param>
+        private async Task<List<ReleaseInfo>> ParseTorrentsAsync(WebClientStringResult results, int alreadyFound, int limit,
+            int previouslyParsedOnPage)
         {
             var releases = new List<ReleaseInfo>();
+            var queryParams = new NameValueCollection
+            {
+                {"func", "getToggle"},
+                {"w", "F"},
+                {"pg", "0"}
+            };
             try
             {
-                var content = results.Content;
                 /* Content Looks like this
                  * 2\15\2\1\1727\207244\1x08 \[WebDL-720p - Eng - AJP69]\gb\2018-03-09 08:11:53\akció, kaland, sci-fi \0\0\1\191170047\1\0\Anonymous\50\0\0\\0\4\0\174\0\
                  * 1\ 0\0\1\1727\207243\1x08 \[WebDL-1080p - Eng - AJP69]\gb\2018-03-09 08:11:49\akció, kaland, sci-fi \0\0\1\305729738\1\0\Anonymous\50\0\0\\0\8\0\102\0\0\0\0\1\\\
+                 * First 3 items per page are total results, results per page, and results this page
+                 * There is also a tail of ~4 items after the results for some reason. Looks like \1\\\
                  */
-                var parameters = content.Split(new string[] { "\\" }, StringSplitOptions.None);
-                var type = "normal";
-                /*
-                 * Split the releases by '\' and go through them.
-                 * 27 element belongs to one torrent
-                 */
-                for (var j = previously_parsed_on_page * 27; (j + 27 < parameters.Length && ((already_found + releases.Count) < limit)); j = j + 27)
+                var parameters = results.Content.Split('\\');
+                var torrentsThisPage = int.Parse(parameters[2]);
+                var maxTorrents = Math.Min(torrentsThisPage, limit - alreadyFound);
+                var rows = parameters.Skip(3) //Skip pages info
+                    .Select((str, index) => (index, str)) //Index each string for grouping
+                    .GroupBy(n => n.index / 27) // each torrent is divided into 27 parts
+                    .Skip(previouslyParsedOnPage).Take(maxTorrents)// only parse the rows we want
+                    //Convert above query into a List<string>(27) in prep for parsing
+                    .Select(entry => entry.Select(item => item.str).ToList());
+                foreach (var row in rows)
                 {
-                    var release = new ReleaseInfo();
-                    var imdb_id = 4 + j;
-                    var torrent_id = 5 + j;
-                    var is_season_id = 6 + j;
-                    var publish_date_id = 9 + j;
-                    var files_id = 13 + j;
-                    var size_id = 14 + j;
-                    var seeders_id = 23;
-                    var peers_id = 24 + j;
-                    var grabs_id = 25 + j;
-                    type = "normal";
-                    //IMDB id of the series
-                    var seriesinfo = series.Find(x => x.id.Contains(parameters[imdb_id]));
-                    if (seriesinfo != null && !parameters[imdb_id].Equals(""))
-                        release.Imdb = long.Parse(seriesinfo.imdbid);
-                    //ID of the torrent
-                    var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
-                    var fileinfoURL = SearchUrl + "?func=getToggle&id=" + parameters[torrent_id] + "&w=F&pg=0&now=" + unixTimestamp;
-                    var fileinfo = (await RequestStringWithCookiesAndRetry(fileinfoURL)).Content;
-                    release.Link = new Uri(DownloadUrl + "?id=" + parameters[torrent_id]);
-                    release.Guid = release.Link;
-                    release.Comments = release.Link;
-                    var fileinf = fileinfo.Split(new string[] { "\\\\" }, StringSplitOptions.None);
-                    if (fileinf.Length > 1)
+                    var torrentId = row[(int)TorrentParts.TorrentId];
+                    var downloadLink = new Uri(DownloadUrl + "?id=" + torrentId);
+                    var imdbId = _imdbLookup.TryGetValue(int.Parse(row[(int)TorrentParts.InternalId]), out var imdb)
+                        ? (long?)imdb
+                        : null;
+                    var files = int.Parse(row[(int)TorrentParts.Files]);
+                    var size = long.Parse(row[(int)TorrentParts.SizeBytes]);
+                    var seeders = int.Parse(row[(int)TorrentParts.Seeders]);
+                    var leechers = int.Parse(row[(int)TorrentParts.Leechers]);
+                    var grabs = int.Parse(row[(int)TorrentParts.Grabs]);
+                    var publishDate = DateTime.Parse(row[(int)TorrentParts.PublishDate]);
+                    var isSeasonPack = row[(int)TorrentParts.EpisodeInfo].Contains("évad");
+                    queryParams["id"] = torrentId;
+                    queryParams["now"] = DateTimeUtil.DateTimeToUnixTimestamp(DateTime.UtcNow)
+                        .ToString(CultureInfo.InvariantCulture);
+                    var filesList = (await RequestStringWithCookiesAndRetry(SearchUrl + "?" + queryParams.GetQueryString()))
+                        .Content;
+                    var firstFileName = filesList.Split(
+                        new[]
+                        {
+                            @"\\"
+                        }, StringSplitOptions.None)[1];
+                    // Delete the file extension. Many first files are either mkv or nfo.
+                    // Cannot confirm these are the only extensions, so generic remove all 3 char extensions at end of section.
+                    firstFileName = Regex.Replace(firstFileName, @"\.\w{3}$", string.Empty);
+                    if (isSeasonPack)
+                        firstFileName = Regex.Replace(
+                            firstFileName, @"(?<=S\d+)E\d{2,3}", string.Empty, RegexOptions.IgnoreCase);
+                    var category = new[]
                     {
-                        release.Title = fileinf[1];
-                        if (fileinf[1].Length > 5 && fileinf[1].Substring(fileinf[1].Length - 4).Contains("."))
-                            release.Title = fileinf[1].Substring(0, fileinf[1].Length - 4);
-                    }
-                    // SeasonPack check
-                    if (parameters[is_season_id].Contains("évad/"))
+                        TvCategoryParser.ParseTvShowQuality(firstFileName)
+                    };
+                    var release = new ReleaseInfo
                     {
-                        type = "season";
-                        // If this is a seasonpack, remove episode nunmber from title.
-                        release.Title = Regex.Replace(release.Title, "s0?(\\d+)(e0?(\\d+))", "S$1", RegexOptions.IgnoreCase);
-                    }
-                    release.PublishDate = DateTime.Parse(parameters[publish_date_id], CultureInfo.InvariantCulture);
-                    release.Files = int.Parse(parameters[files_id]);
-                    release.Size = long.Parse(parameters[size_id]);
-                    release.Seeders = int.Parse(parameters[seeders_id]);
-                    release.Peers = (int.Parse(parameters[peers_id]) + release.Seeders);
-                    release.Grabs = int.Parse(parameters[grabs_id]);
-                    release.MinimumRatio = 1;
-                    release.MinimumSeedTime = 172800; // 48 hours
-                    release.DownloadVolumeFactor = 1;
-                    release.UploadVolumeFactor = UploadFactorCalculator(release.PublishDate, type);
-                    release.Category = new List<int> { TvCategoryParser.ParseTvShowQuality(release.Title) };
-                    if ((already_found + releases.Count) < limit)
-                        releases.Add(release);
-                    else
-                        return releases;
+                        Title = firstFileName,
+                        Link = downloadLink,
+                        Guid = downloadLink,
+                        PublishDate = publishDate,
+                        Files = files,
+                        Size = size,
+                        Category = category,
+                        Seeders = seeders,
+                        Peers = leechers + seeders,
+                        Grabs = grabs,
+                        MinimumRatio = 1,
+                        MinimumSeedTime = 172800, // 48 hours
+                        DownloadVolumeFactor = 1,
+                        UploadVolumeFactor = UploadFactorCalculator(publishDate, isSeasonPack),
+                        Imdb = imdbId
+                    };
+                    releases.Add(release);
                 }
 
             }
             catch (Exception ex)
             {
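ParseTorrentsAsync above rebuilds each torrent from a flat, backslash-separated payload: three header fields, then 27 fields per torrent, read by position through the `TorrentParts` enum added at the end of this diff. A runnable sketch of that grouping with a hypothetical payload (only the fields read below are filled in):

    using System;
    using System.Linq;

    internal static class RowGroupingDemo
    {
        // Field positions inside one 27-field row, as in the TorrentParts enum from the diff.
        private enum TorrentParts
        {
            InternalId = 1, TorrentId = 2, EpisodeInfo = 3, PublishDate = 6,
            Files = 10, SizeBytes = 11, Seeders = 20, Leechers = 21, Grabs = 22
        }

        private static void Main()
        {
            // Hypothetical payload: 3 header fields (total hits, hits per page, hits on this page)
            // followed by one 27-field row; unused positions are padded with "0".
            var row = new string[27];
            row[(int)TorrentParts.TorrentId] = "207244";
            row[(int)TorrentParts.EpisodeInfo] = "1x08";
            row[(int)TorrentParts.SizeBytes] = "191170047";
            row[(int)TorrentParts.Seeders] = "4";
            var parameters = new[] { "1", "15", "1" }.Concat(row.Select(f => f ?? "0")).ToArray();

            // Same shape as the indexer's query: drop the 3 header fields, index every remaining
            // field, and bundle each run of 27 consecutive fields into one torrent row.
            var rows = parameters.Skip(3)
                                 .Select((str, index) => (index, str))
                                 .GroupBy(n => n.index / 27)
                                 .Select(entry => entry.Select(item => item.str).ToList());

            foreach (var r in rows)
                Console.WriteLine(
                    $"torrent {r[(int)TorrentParts.TorrentId]}: {r[(int)TorrentParts.EpisodeInfo]}, " +
                    $"{r[(int)TorrentParts.SizeBytes]} bytes, {r[(int)TorrentParts.Seeders]} seeders");
        }
    }

Grouping by `index / 27` is what lets `Skip(previouslyParsedOnPage)` and `Take(maxTorrents)` in the new code operate on whole torrents rather than on individual fields.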
@@ -200,181 +205,119 @@ namespace Jackett.Common.Indexers
 
             return releases;
         }
-        /* Search is possible only based by Series ID.
-         * All known series ID is on main page, with their attributes. (ID, EngName, HunName, imdbid)
-         */
 
         /// <summary>
-        /// Get all series info known by site
-        /// These are:
-        /// - Series ID
-        /// - Hungarian name
-        /// - English name
-        /// - IMDB ID
+        /// Map internally used series info to its corresponding IMDB number.
+        /// Saves this data into 2 dictionaries for easy lookup from one value to the other
         /// </summary>
-        /// <returns>The series info.</returns>
-        protected async Task<bool> GetSeriesInfo()
+        private async Task PopulateImdbMapAsync()
         {
-
             var result = await RequestStringWithCookiesAndRetry(BrowseUrl);
-            var parser = new HtmlParser();
-            var dom = parser.ParseDocument(result.Content);
-            var scripts = dom.QuerySelectorAll("script");
-            //TODO Linq
-            foreach (var script in scripts)
+            foreach (Match match in _seriesInfoMatch.Matches(result.Content))
             {
-                if (script.TextContent.Contains("catsh=Array"))
-                {
-                    //TODO no regex in pattern, investigate using string.Split instead?
-                    var seriesKnowBySite = Regex.Split(script.TextContent, "catl");
-                    //TODO consider converting to foreach
-                    for (var i = 1; i < seriesKnowBySite.Length; i++)
-                    {
-                        var id = seriesKnowBySite[i];
-                        var seriesElement = WebUtility.HtmlDecode(id).Split(';');
-                        var hungarianName = seriesElement[1].Split('=')[1].Trim('\'').ToLower();
-                        var englishName = seriesElement[2].Split('=')[1].Trim('\'').ToLower();
-                        var seriesId = seriesElement[0].Split('=')[1].Trim('\'');
-                        var imdbId = seriesElement[7].Split('=')[1].Trim('\'');
-                        var seriesDetail = new SeriesDetail
-                        {
-                            HunName = hungarianName,
-                            EngName = englishName,
-                            id = seriesId,
-                            imdbid = imdbId
-                        };
-                        series.Add(seriesDetail);
-                    }
-                }
+                var internalId = int.Parse(match.Groups["seriesID"].Value);
+                var imdbId = long.Parse(match.Groups["ImdbId"].Value);
+                _imdbLookup[internalId] = imdbId;
+                _internalLookup[imdbId] = internalId;
             }
-            return true;
         }
 
         protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
         {
-            //TODO convert to initializer
             var releases = new List<ReleaseInfo>();
-            // If series from sites are indexed then we don't need to reindex them.
-            if (series?.Any() != true)
-                await GetSeriesInfo();
-            var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
-            WebClientStringResult results;
-            var searchString = "";
-            var exactSearchURL = "";
-            var page = 1;
-            SeriesDetail seriesinfo = null;
-            var base64coded = "";
-            var noimdbmatch = false;
-            var limit = query.Limit;
-            if (limit == 0)
-                limit = 100;
+            if (!_imdbLookup.Any())
+                await PopulateImdbMapAsync();
+            var queryParams = new NameValueCollection
+            {
+                {"now", DateTimeUtil.DateTimeToUnixTimestamp(DateTime.UtcNow).ToString(CultureInfo.InvariantCulture)},
+                {"p", "1"}
+            };
+            if (query.Limit == 0)
+                query.Limit = 100;
             if (query.IsImdbQuery)
             {
-                seriesinfo = series.Find(x => x.imdbid.Equals(query.ImdbIDShort));
-                if (seriesinfo != null && !query.ImdbIDShort.Equals(""))
-                {
-                    var querrySeason = "";
-                    if (query.Season != 0)
-                        querrySeason = query.Season.ToString();
-                    exactSearchURL = SearchUrl + "?s=" + querrySeason + "&e=" + query.Episode + "&g=" + seriesinfo.id + "&now=" + unixTimestamp.ToString();
-                }
+                if (!string.IsNullOrEmpty(query.ImdbIDShort) && _internalLookup.TryGetValue(
+                    long.Parse(query.ImdbIDShort), out var internalId))
+                    queryParams.Add("g", internalId.ToString());
                 else
-                {
-                    // IMDB_ID was not founded in site database.
-                    return releases;
-                }
-
+                    return Enumerable.Empty<ReleaseInfo>();
             }
-            if (!query.IsImdbQuery || noimdbmatch)
+            else
             {
-                /* SearchString format is the following: Seriesname 1X09 */
-                if (query.SearchTerm != null && !query.SearchTerm.Equals(""))
+                queryParams.Add("g", "0");
+                if (!string.IsNullOrWhiteSpace(query.SearchTerm))
                 {
-                    searchString += query.SanitizedSearchTerm;
-                    // convert SnnEnn to nnxnn for dashboard searches
-                    if (query.Season == 0 && (query.Episode == null || query.Episode.Equals("")))
+                    var searchString = query.SanitizedSearchTerm;
+                    if (query.Season == 0 && string.IsNullOrWhiteSpace(query.Episode))
                     {
-                        var searchMatch = _searchStringRegex.Match(searchString);
+                        //Jackett doesn't check for lowercase s00e00 so do it here.
+                        var searchMatch = _seriesInfoSearchRegex.Match(searchString);
                         if (searchMatch.Success)
                         {
-                            query.Season = int.Parse(searchMatch.Groups[2].Value);
-                            query.Episode = searchMatch.Groups[4].Success ? string.Format("{0:00}", (int?)int.Parse(searchMatch.Groups[4].Value)) : null;
-                            searchString = searchMatch.Groups[1].Value; // strip SnnEnn
+                            query.Season = int.Parse(searchMatch.Groups["season"].Value);
+                            query.Episode = searchMatch.Groups["episode"].Success
+                                ? $"{int.Parse(searchMatch.Groups["episode"].Value):00}"
+                                : null;
+                            query.SearchTerm = searchString.Remove(searchMatch.Index, searchMatch.Length).Trim(); // strip SnnEnn
                         }
                     }
-
-                    if (query.Season != 0)
-                        searchString += " " + query.Season.ToString();
-                    if (query.Episode != null && !query.Episode.Equals(""))
-                        searchString += string.Format("x{0:00}", int.Parse(query.Episode));
-                }
-                else
-                {
-                    // if searchquery is empty this is a test, so shorten the response time
-                    limit = 20;
                 }
+                else if (query.IsTest)
+                    query.Limit = 20;
 
-                /* Search string must be converted to Base64 */
-                var plainTextBytes = System.Text.Encoding.UTF8.GetBytes(searchString);
-                base64coded = System.Convert.ToBase64String(plainTextBytes);
-
-
-                exactSearchURL = SearchUrl + "?gyors=" + base64coded + "&p=" + page + "&now=" + unixTimestamp.ToString();
+                // Search string must be converted to Base64
+                var plainTextBytes = Encoding.UTF8.GetBytes(query.SanitizedSearchTerm);
+                queryParams.Add("c", Convert.ToBase64String(plainTextBytes));
             }
 
-            /*Start search*/
-            results = await RequestStringWithCookiesAndRetry(exactSearchURL);
-            /* Parse page Information from result */
+            if (query.Season != 0)
+            {
+                queryParams.Add("s", query.Season.ToString());
+                if (!string.IsNullOrWhiteSpace(query.Episode))
+                    queryParams.Add("e", query.Episode);
+            }
+
+            var results = await RequestStringWithCookiesAndRetry(SearchUrl + "?" + queryParams.GetQueryString());
+            // Parse page Information from result
             var content = results.Content;
             var splits = content.Split('\\');
-            var max_found = int.Parse(splits[0]);
-            var torrent_per_page = int.Parse(splits[1]);
-            if (torrent_per_page == 0)
-                return releases;
-            var start_page = (query.Offset / torrent_per_page) + 1;
-            var previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
-            if (previously_parsed_on_page <= 0)
-                previously_parsed_on_page = query.Offset;
-
-
-            var pages = Math.Ceiling(max_found / (double)torrent_per_page);
-
-            /* First page content is already ready */
-            if (start_page == 1)
+            var totalFound = int.Parse(splits[0]);
+            var torrentPerPage = int.Parse(splits[1]);
+            if (totalFound == 0 || query.Offset > totalFound)
+                return Enumerable.Empty<ReleaseInfo>();
+            var startPage = query.Offset / torrentPerPage + 1;
+            var previouslyParsedOnPage = query.Offset % torrentPerPage;
+            var pages = totalFound / torrentPerPage + 1;
+            // First page content is already ready
+            if (startPage == 1)
             {
-                releases.AddRange(await ParseTorrents(results, query, releases.Count, limit, previously_parsed_on_page));
-                previously_parsed_on_page = 0;
-                start_page++;
+                releases.AddRange(await ParseTorrentsAsync(results, releases.Count, query.Limit, previouslyParsedOnPage));
+                previouslyParsedOnPage = 0;
+                startPage++;
             }
 
-            for (page = start_page; (page <= pages && releases.Count < limit); page++)
+            for (var page = startPage; page <= pages && releases.Count < query.Limit; page++)
             {
-                if (query.IsImdbQuery && seriesinfo != null)
-                    exactSearchURL = SearchUrl + "?s=" + query.Season + "&e=" + query.Episode + "&g=" + seriesinfo.id + "&p=" + page + "&now=" + unixTimestamp.ToString();
-                else
-                    exactSearchURL = SearchUrl + "?gyors=" + base64coded + "&p=" + page + "&now=" + unixTimestamp.ToString();
-                results = await RequestStringWithCookiesAndRetry(exactSearchURL);
-                releases.AddRange(await ParseTorrents(results, query, releases.Count, limit, previously_parsed_on_page));
-                previously_parsed_on_page = 0;
-
+                queryParams["page"] = page.ToString();
+                results = await RequestStringWithCookiesAndRetry(SearchUrl + "?" + queryParams.GetQueryString());
+                releases.AddRange(await ParseTorrentsAsync(results, releases.Count, query.Limit, previouslyParsedOnPage));
+                previouslyParsedOnPage = 0;
             }
 
             return releases;
         }
-    }
-    public class SeriesDetail
-    {
-        public string id;
-        public string HunName;
-        public string EngName;
-        public string imdbid;
-    }
+
+        private enum TorrentParts
+        {
+            InternalId = 1,
+            TorrentId = 2,
+            EpisodeInfo = 3,
+            PublishDate = 6,
+            Files = 10,
+            SizeBytes = 11,
+            Seeders = 20,
+            Leechers = 21,
+            Grabs = 22
+        }
+    }
 }
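The paging arithmetic in the new PerformQuery maps the Torznab offset onto the site's pagination: integer division picks the first page that still has unseen rows, and the remainder is how many rows on that page were already returned. A small sketch with made-up numbers:

    using System;

    internal static class PagingDemo
    {
        private static void Main()
        {
            // Hypothetical numbers: 92 hits in total, 15 per page, and the caller
            // has already received 37 results (query.Offset).
            const int totalFound = 92, torrentPerPage = 15, offset = 37;

            var startPage = offset / torrentPerPage + 1;          // 3: first page still holding unseen rows
            var previouslyParsedOnPage = offset % torrentPerPage; // 7: rows on that page already returned
            var pages = totalFound / torrentPerPage + 1;          // 7: last page that can be requested

            Console.WriteLine($"start at page {startPage}, skip {previouslyParsedOnPage} rows, stop after page {pages}");
        }
    }

As in the diff, the in-page skip only applies to the first page parsed; after that the code resets it to 0 and consumes every later page from its first row.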