Mirror of https://github.com/Jackett/Jackett

Commit 9437765697 (parent 61f78a5367)
Implement Shazbat #151

5 changed files with 163 additions and 4 deletions
@@ -44,6 +44,7 @@ We were previously focused on TV but are working on extending searches to allow
 * [RuTor](http://rutor.org/)
 * [SceneAccess](https://sceneaccess.eu/login)
 * [SceneTime](https://www.scenetime.com/)
+* [Shazbat](www.shazbat.tv/login)
 * [ShowRSS](https://showrss.info/)
 * [Strike](https://getstrike.net/)
 * [T411](http://www.t411.io/)
BIN src/Jackett/Content/logos/shazbat.png (new binary file, 3.2 KiB; not shown)
@@ -311,26 +311,27 @@ namespace Jackett.Indexers
             return await webclient.GetBytes(request);
         }
 
-        protected async Task<WebClientStringResult> PostDataWithCookies(string url, IEnumerable<KeyValuePair<string, string>> data, string cookieOverride = null)
+        protected async Task<WebClientStringResult> PostDataWithCookies(string url, IEnumerable<KeyValuePair<string, string>> data, string cookieOverride = null, string referer = null)
         {
             var request = new Utils.Clients.WebRequest()
             {
                 Url = url,
                 Type = RequestType.POST,
                 Cookies = cookieOverride ?? CookieHeader,
-                PostData = data
+                PostData = data,
+                Referer = referer
             };
             return await webclient.GetString(request);
         }
 
-        protected async Task<WebClientStringResult> PostDataWithCookiesAndRetry(string url, IEnumerable<KeyValuePair<string, string>> data, string cookieOverride = null)
+        protected async Task<WebClientStringResult> PostDataWithCookiesAndRetry(string url, IEnumerable<KeyValuePair<string, string>> data, string cookieOverride = null, string referer = null)
        {
             Exception lastException = null;
             for (int i = 0; i < 3; i++)
             {
                 try
                 {
-                    return await PostDataWithCookies(url, data, cookieOverride);
+                    return await PostDataWithCookies(url, data, cookieOverride, referer);
                 }
                 catch (Exception e)
                 {
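Because the new referer argument defaults to null, existing callers compile unchanged; only indexers that need a Referer header on their POSTs pass the extra value. Below is a minimal calling sketch, not a complete class: it assumes it sits inside an indexer derived from BaseIndexer, so PostDataWithCookiesAndRetry and the SearchUrl/TorrentsUrl properties (which the Shazbat indexer further down defines) are in scope.

    // Sketch only: assumes an enclosing indexer class derived from BaseIndexer.
    private async Task<WebClientStringResult> SearchSketch(string term)
    {
        var pairs = new Dictionary<string, string>
        {
            { "search", term },
            { "portlet", "true" }
        };

        // Passing null keeps the indexer's stored session cookie; the last argument
        // becomes the Referer header on the POST. Omitting it behaves exactly as
        // before this change.
        return await PostDataWithCookiesAndRetry(SearchUrl, pairs, null, TorrentsUrl);
    }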
src/Jackett/Indexers/Shazbat.cs (new file, 153 lines)

using CsQuery;
using Jackett.Models;
using Jackett.Services;
using Jackett.Utils;
using Jackett.Utils.Clients;
using Newtonsoft.Json.Linq;
using NLog;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using System.Web;
using Jackett.Models.IndexerConfig;
using System.Globalization;
using System.Text.RegularExpressions;

namespace Jackett.Indexers
{
    public class Shazbat : BaseIndexer, IIndexer
    {
        private string LoginUrl { get { return SiteLink + "login"; } }
        private string SearchUrl { get { return SiteLink + "search"; } }
        private string TorrentsUrl { get { return SiteLink + "torrents"; } }

        new ConfigurationDataBasicLogin configData
        {
            get { return (ConfigurationDataBasicLogin)base.configData; }
            set { base.configData = value; }
        }

        public Shazbat(IIndexerManagerService i, IWebClient c, Logger l, IProtectionService ps)
            : base(name: "Shazbat",
                description: "Modern indexer",
                link: "http://www.shazbat.tv/",
                caps: new TorznabCapabilities(TorznabCatType.TV,
                                              TorznabCatType.TVHD,
                                              TorznabCatType.TVSD),
                manager: i,
                client: c,
                logger: l,
                p: ps,
                configData: new ConfigurationDataBasicLogin())
        {
        }

        public async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
        {
            configData.LoadValuesFromJson(configJson);
            var pairs = new Dictionary<string, string> {
                { "referer", "login"},
                { "query", ""},
                { "tv_login", configData.Username.Value },
                { "tv_password", configData.Password.Value },
                { "email", "" }
            };

            // Get cookie
            var firstRequest = await RequestStringWithCookiesAndRetry(LoginUrl);

            var result = await RequestLoginAndFollowRedirect(LoginUrl, pairs, null, true, null, LoginUrl);
            await ConfigureIfOK(result.Cookies, result.Content != null && result.Content.Contains("glyphicon-log-out"), () =>
            {
                throw new ExceptionWithConfigData("The username and password entered do not match.", configData);
            });

            return IndexerConfigurationStatus.RequiresTesting;
        }

        public async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
        {
            var releases = new List<ReleaseInfo>();

            var queryString = query.GetQueryString();
            var url = TorrentsUrl;

            WebClientStringResult results = null;

            if (!string.IsNullOrWhiteSpace(queryString))
            {
                var pairs = new Dictionary<string, string> {
                    { "search", queryString},
                    { "portlet", "true"}
                };

                results = await PostDataWithCookiesAndRetry(SearchUrl, pairs, null, TorrentsUrl);
            }
            else
            {
                var pairs = new Dictionary<string, string> {
                    { "portlet", "true"}
                };

                results = await PostDataWithCookiesAndRetry(TorrentsUrl, pairs, null, TorrentsUrl);
            }

            try
            {
                CQ dom = results.Content;
                var rows = dom["#torrent-table tr"];
                foreach (var row in rows.Skip(1))
                {
                    var release = new ReleaseInfo();
                    var qRow = row.Cq();
                    var titleRow = qRow.Find("td:eq(2)").First();
                    titleRow.Children().Remove();
                    release.Title = titleRow.Text().Trim();
                    if (string.IsNullOrWhiteSpace(release.Title))
                        continue;
                    release.Description = release.Title;

                    var qLink = row.Cq().Find("td:eq(4) a:eq(0)");
                    release.Link = new Uri(SiteLink + qLink.Attr("href"));
                    release.Guid = release.Link;
                    var qLinkComm = row.Cq().Find("td:eq(4) a:eq(1)");
                    release.Comments = new Uri(SiteLink + qLinkComm.Attr("href"));

                    // 07-22-2015 11:08 AM
                    var dateString = qRow.Find(".datetime").Attr("data-timestamp");
                    release.PublishDate = DateTimeUtil.UnixTimestampToDateTime(ParseUtil.CoerceDouble(dateString));
                    var infoString = row.Cq().Find("td:eq(3)").Text();

                    release.Size = ParseUtil.CoerceLong(Regex.Match(infoString, "\\((\\d+)\\)").Value.Replace("(", "").Replace(")", ""));

                    var infosplit = infoString.Replace("/", string.Empty).Split(":".ToCharArray());
                    release.Seeders = ParseUtil.CoerceInt(infosplit[1]);
                    release.Peers = release.Seeders + ParseUtil.CoerceInt(infosplit[2]);

                    // var tags = row.Cq().Find(".label-tag").Text(); These don't see to parse - bad tags?

                    if (release.Title.Contains("1080p") || release.Title.Contains("720p"))
                    {
                        release.Category = TorznabCatType.TVHD.ID;
                    }
                    else
                    {
                        release.Category = TorznabCatType.TVSD.ID;
                    }

                    releases.Add(release);
                }
            }
            catch (Exception ex)
            {
                OnParseError(results.Content, ex);
            }

            return releases;
        }
    }
}
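For reference, the login that ApplyConfiguration performs through RequestLoginAndFollowRedirect boils down to a single form POST. The standalone sketch below is only an illustration: the field names and the "glyphicon-log-out" success marker come from the commit, while the HttpClient plumbing and the placeholder credentials are assumptions (Jackett actually routes the request through its own web client with cookie and redirect handling).

    using System;
    using System.Collections.Generic;
    using System.Net;
    using System.Net.Http;
    using System.Threading.Tasks;

    class ShazbatLoginSketch
    {
        static async Task Main()
        {
            var handler = new HttpClientHandler
            {
                AllowAutoRedirect = true,              // the indexer follows the post-login redirect
                CookieContainer = new CookieContainer()
            };

            using (var client = new HttpClient(handler))
            {
                // Form fields as posted by the indexer's ApplyConfiguration.
                var form = new FormUrlEncodedContent(new Dictionary<string, string>
                {
                    { "referer", "login" },
                    { "query", "" },
                    { "tv_login", "your-username" },   // placeholder credentials
                    { "tv_password", "your-password" },
                    { "email", "" }
                });

                var response = await client.PostAsync("http://www.shazbat.tv/login", form);
                var body = await response.Content.ReadAsStringAsync();

                // The indexer treats the logout icon in the returned page as proof of a session.
                Console.WriteLine(body.Contains("glyphicon-log-out") ? "Logged in" : "Login failed");
            }
        }
    }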
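The size and peer-count parsing in PerformQuery works off the raw text of the info cell (td:eq(3)). The exact Shazbat markup is not shown in the commit, so the sample string below is only a guess at its shape; the snippet reproduces the same regex-and-split logic with plain .NET calls to show how a parenthesised number (taken as the size) and two colon-separated counters would be pulled apart.

    using System;
    using System.Text.RegularExpressions;

    class InfoCellParseSketch
    {
        static void Main()
        {
            // Hypothetical info-cell text; the real Shazbat table may format this differently.
            var infoString = "4.3 GB (4617089843) seeders: 12 / leechers: 3";

            // Same pattern as the indexer: the digits inside parentheses are taken as the size.
            var size = long.Parse(Regex.Match(infoString, "\\((\\d+)\\)").Groups[1].Value);

            // Same idea as the indexer's Split(':'): field 1 carries the seeder count,
            // field 2 the leecher count, and peers = seeders + leechers.
            var parts = infoString.Replace("/", string.Empty).Split(':');
            var seeders = int.Parse(Regex.Match(parts[1], "\\d+").Value);
            var leechers = int.Parse(Regex.Match(parts[2], "\\d+").Value);

            Console.WriteLine($"size={size} seeders={seeders} peers={seeders + leechers}");
        }
    }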
@@ -185,6 +185,7 @@
     <Compile Include="Indexers\BitMeTV.cs" />
     <Compile Include="Indexers\Demonoid.cs" />
     <Compile Include="Indexers\FrenchTorrentDb.cs" />
+    <Compile Include="Indexers\Shazbat.cs" />
     <Compile Include="Indexers\NxtGn.cs" />
     <Compile Include="Indexers\Freshon.cs" />
     <Compile Include="Indexers\HDSpace.cs" />
@@ -473,6 +474,9 @@
     <Content Include="Content\logos\scenetime.png">
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
     </Content>
+    <Content Include="Content\logos\shazbat.png">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </Content>
     <Content Include="Content\logos\showrss.png">
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
     </Content>