mirror of https://github.com/Jackett/Jackett
commit
f02404cd45
|
@ -0,0 +1,36 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
|
||||||
|
namespace Jackett
{
    /// <summary>
    /// A snapshot of the releases returned for a single search query, stamped
    /// with its creation time so stale entries can be expired by the caller.
    /// </summary>
    public class CachedResult
    {
        // All three fields are assigned once in the constructor and never
        // reassigned, so they are declared readonly.
        private readonly List<ReleaseInfo> results;
        private readonly DateTime created;
        private readonly string query;

        /// <summary>
        /// Captures <paramref name="results"/> for <paramref name="query"/> at the current local time.
        /// Note: the list is held by reference, not copied; callers should not mutate it afterwards.
        /// </summary>
        public CachedResult(string query, List<ReleaseInfo> results)
        {
            this.results = results;
            created = DateTime.Now;
            this.query = query;
        }

        /// <summary>The cached releases, exposed read-only so consumers cannot mutate the cache entry.</summary>
        public IReadOnlyList<ReleaseInfo> Results
        {
            get { return results.AsReadOnly(); }
        }

        /// <summary>When this entry was created (local time); used for expiry checks.</summary>
        public DateTime Created
        {
            get { return created; }
        }

        /// <summary>The search term this entry caches results for.</summary>
        public string Query
        {
            get { return query; }
        }
    }
}
|
|
@ -0,0 +1,27 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
|
||||||
|
namespace Jackett
{
    /// <summary>
    /// Username/password configuration for AnimeBytes, extending the basic login
    /// form with a raw-release toggle and two informational warnings for the UI.
    /// </summary>
    public class ConfigurationDataBasicLoginAnimeBytes : ConfigurationDataBasicLogin
    {
        // Whether untranslated (raw) releases should be included in search results.
        public BoolItem IncludeRaw { get; private set; }
        // Display-only reminder to disable rageid lookup in Sonarr for this tracker.
        public DisplayItem RageIdWarning { get; private set; }
        // Display-only note that upload dates are approximated from the release year.
        public DisplayItem DateWarning { get; private set; }

        public ConfigurationDataBasicLoginAnimeBytes(): base()
        {
            IncludeRaw = new BoolItem() { Name = "IncludeRaw", Value = false };
            RageIdWarning = new DisplayItem("Ensure rageid lookup is disabled in Sonarr for this tracker.") { Name = "RageWarning" };
            DateWarning = new DisplayItem("This tracker does not supply upload dates so they are based off year of release.") { Name = "DateWarning" };
        }

        /// <summary>
        /// Returns Username and Password from the base class followed by the
        /// three items declared above, in the order they appear in the UI.
        /// </summary>
        public override Item[] GetItems()
        {
            return new Item[] { Username, Password, IncludeRaw, RageIdWarning, DateWarning };
        }
    }
}
|
|
@ -0,0 +1,387 @@
|
||||||
|
using CsQuery;
|
||||||
|
using Newtonsoft.Json.Linq;
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Globalization;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Net;
|
||||||
|
using System.Net.Http;
|
||||||
|
using System.Security.Cryptography;
|
||||||
|
using System.Text;
|
||||||
|
using System.Text.RegularExpressions;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using System.Web;
|
||||||
|
|
||||||
|
namespace Jackett.Indexers
{
    /// <summary>
    /// Indexer for the AnimeBytes private tracker. Handles CSRF-protected login,
    /// cookie persistence, and scraping of the torrent listing pages.
    /// </summary>
    public class AnimeBytes : IndexerInterface
    {
        // Shared query cache: this tracker only deals in full seasons, so Sonarr's
        // per-episode searches would otherwise issue many identical queries.
        private static readonly List<CachedResult> cache = new List<CachedResult>();
        private static readonly TimeSpan cacheTime = new TimeSpan(0, 9, 0);

        public event Action<IndexerInterface, string, Exception> OnResultParsingError;
        public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;

        // Present a mainstream browser user agent to the site.
        static string chromeUserAgent = "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36";

        public string DisplayName
        {
            get { return "AnimeBytes"; }
        }

        public string DisplayDescription
        {
            get { return "The web's best Chinese cartoons"; }
        }

        public Uri SiteLink
        {
            get { return new Uri(BaseUrl); }
        }

        const string BaseUrl = "https://animebytes.tv";
        const string LoginUrl = BaseUrl + "/user/login";
        const string SearchUrl = BaseUrl + "/torrents.php?filter_cat[1]=1";

        public bool IsConfigured { get; private set; }

        // When false, release groups tagged as raws are skipped during parsing.
        public bool AllowRaws { get; private set; }

        CookieContainer cookieContainer;
        HttpClientHandler handler;
        HttpClient client;

        public AnimeBytes()
        {
            IsConfigured = false;
            cookieContainer = new CookieContainer();
            // AllowAutoRedirect is off because the login response's session cookie
            // must be captured manually before following the redirect
            // (see ApplyConfiguration).
            handler = new HttpClientHandler
            {
                CookieContainer = cookieContainer,
                AllowAutoRedirect = false,
                UseCookies = true,
            };
            client = new HttpClient(handler);
            client.DefaultRequestHeaders.Add("User-Agent", chromeUserAgent);
        }

        public Task<ConfigurationData> GetConfigurationForSetup()
        {
            var config = new ConfigurationDataBasicLoginAnimeBytes();
            return Task.FromResult<ConfigurationData>(config);
        }

        /// <summary>
        /// Logs in with the supplied credentials and, on success, raises
        /// OnSaveConfigurationRequested with the cookies/raw flag so they can be persisted.
        /// Throws ExceptionWithConfigData when the login page does not show a logout link.
        /// </summary>
        public async Task ApplyConfiguration(JToken configJson)
        {
            var config = new ConfigurationDataBasicLoginAnimeBytes();
            config.LoadValuesFromJson(configJson);

            // Get the login form as we need the CSRF Token
            var loginPage = await client.GetAsync(LoginUrl);
            CQ loginPageDom = await loginPage.Content.ReadAsStringAsync();
            var csrfToken = loginPageDom["input[name=\"csrf_token\"]"].Last();

            // Build login form
            var pairs = new Dictionary<string, string> {
                { "csrf_token", csrfToken.Attr("value") },
                { "username", config.Username.Value },
                { "password", config.Password.Value },
                { "keeplogged_sent", "true" },
                { "keeplogged", "on" },
                { "login", "Log In!" }
            };

            var content = new FormUrlEncodedContent(pairs);

            // Do the login
            var response = await client.PostAsync(LoginUrl, content);
            var responseContent = await response.Content.ReadAsStringAsync();

            // Compatiblity issue between the cookie format and httpclient
            // Pull it out manually ignoring the expiry date then set it manually
            // http://stackoverflow.com/questions/14681144/httpclient-not-storing-cookies-in-cookiecontainer
            IEnumerable<string> cookies;
            if (response.Headers.TryGetValues("set-cookie", out cookies))
            {
                foreach (var c in cookies)
                {
                    // Strip the attributes (expiry etc.) after the last ';', but
                    // cope with headers that carry no attributes at all.
                    var attrIndex = c.LastIndexOf(';');
                    cookieContainer.SetCookies(new Uri(BaseUrl), attrIndex > -1 ? c.Substring(0, attrIndex) : c);
                }
            }

            // Re-apply a long expiry to the session cookie, since the original
            // expiry attribute was discarded above.
            foreach (Cookie cookie in cookieContainer.GetCookies(new Uri(BaseUrl)))
            {
                if (cookie.Name == "session")
                {
                    cookie.Expires = DateTime.Now.AddDays(360);
                    break;
                }
            }

            // Get the home page now we are logged in as AllowAutoRedirect is false as we needed to get the cookie manually.
            response = await client.GetAsync(BaseUrl);
            responseContent = await response.Content.ReadAsStringAsync();

            if (!responseContent.Contains("/user/logout"))
            {
                throw new ExceptionWithConfigData("Failed to login, 6 failed attempts will get you banned for 6 hours.", (ConfigurationData)config);
            }
            else
            {
                AllowRaws = config.IncludeRaw.Value;
                var configSaveData = new JObject();
                configSaveData["cookies"] = cookieContainer.ToJson(SiteLink);
                configSaveData["raws"] = AllowRaws;

                if (OnSaveConfigurationRequested != null)
                    OnSaveConfigurationRequested(this, configSaveData);

                IsConfigured = true;
            }
        }

        public void LoadFromSavedConfiguration(JToken jsonConfig)
        {
            cookieContainer.FillFromJson(new Uri(BaseUrl), (JArray)jsonConfig["cookies"]);
            IsConfigured = true;
            AllowRaws = jsonConfig["raws"].Value<bool>();
        }

        /// <summary>
        /// Returns the uppercase hex MD5 of <paramref name="input"/>. Used only to
        /// build a per-title download URL so Sonarr can dedupe on it - not for security.
        /// </summary>
        private string Hash(string input)
        {
            // MD5 is IDisposable; dispose deterministically instead of leaking the handle.
            using (MD5 md5 = System.Security.Cryptography.MD5.Create())
            {
                byte[] inputBytes = System.Text.Encoding.ASCII.GetBytes(input);
                byte[] hashBytes = md5.ComputeHash(inputBytes);

                // Convert the byte array to hexadecimal string
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < hashBytes.Length; i++)
                {
                    sb.Append(hashBytes[i].ToString("X2"));
                }
                return sb.ToString();
            }
        }

        // Drops cache entries older than cacheTime. Callers must hold the cache lock.
        private void CleanCache()
        {
            // BUGFIX: the entry's age is "now - created". The original computed
            // "created - now", which is always negative, so entries never expired.
            foreach (var expired in cache.Where(i => DateTime.Now - i.Created > cacheTime).ToList())
            {
                cache.Remove(expired);
            }
        }

        /// <summary>
        /// Searches the tracker (or the local cache) for the given query and returns
        /// cloned ReleaseInfo results so callers can safely mutate them.
        /// </summary>
        public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
        {
            // This tracker only deals with full seasons so chop off the episode/season number if we have it D:
            if (!string.IsNullOrWhiteSpace(query.SearchTerm))
            {
                var splitindex = query.SearchTerm.LastIndexOf(' ');
                if (splitindex > -1)
                    query.SearchTerm = query.SearchTerm.Substring(0, splitindex);
            }

            // The result list
            var releases = new List<ReleaseInfo>();

            // Check cache first so we don't query the server for each episode when searching for each episode in a series.
            lock (cache)
            {
                // Remove old cache items
                CleanCache();

                var cachedResult = cache.Where(i => i.Query == query.SearchTerm).FirstOrDefault();
                if (cachedResult != null)
                    return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
            }

            var queryUrl = SearchUrl;
            // Only include the query bit if its required as hopefully the site caches the non query page
            if (!string.IsNullOrWhiteSpace(query.SearchTerm))
            {
                queryUrl += "&action=advanced&search_type=title&sort=time_added&way=desc&anime%5Btv_series%5D=1&searchstr=" + WebUtility.UrlEncode(query.SearchTerm);
            }

            // Get the content from the tracker
            var response = await client.GetAsync(queryUrl);
            var responseContent = await response.Content.ReadAsStringAsync();
            CQ dom = responseContent;

            // Parse
            try
            {
                var releaseInfo = "S01";
                var root = dom.Find(".anime");
                // We may have got redirected to the series page if we have none of these
                if (root.Count() == 0)
                    root = dom.Find(".torrent_table");

                foreach (var series in root)
                {
                    var seriesCq = series.Cq();

                    var synonyms = new List<string>();
                    var mainTitle = seriesCq.Find(".group_title strong a").First().Text().Trim();

                    // Year is the trailing "[2015]" style suffix on the group title.
                    var yearStr = seriesCq.Find(".group_title strong").First().Text().Trim().Replace("]", "").Trim();
                    int yearIndex = yearStr.LastIndexOf("[");
                    if (yearIndex > -1)
                        yearStr = yearStr.Substring(yearIndex + 1);

                    int year = 0;
                    if (!int.TryParse(yearStr, out year))
                        year = DateTime.Now.Year;

                    synonyms.Add(mainTitle);

                    // If the title contains a comma then we can't use the synonyms as they are comma seperated
                    if (!mainTitle.Contains(","))
                    {
                        var symnomnNames = string.Empty;
                        foreach (var e in seriesCq.Find(".group_statbox li"))
                        {
                            if (e.FirstChild.InnerText == "Synonyms:")
                            {
                                symnomnNames = e.InnerText;
                            }
                        }

                        if (!string.IsNullOrWhiteSpace(symnomnNames))
                        {
                            foreach (var name in symnomnNames.Split(",".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))
                            {
                                var theName = name.Trim();
                                // Skip names containing HTML entities ("&#"): they render badly.
                                if (!theName.Contains("&#") && !string.IsNullOrWhiteSpace(theName))
                                {
                                    synonyms.Add(theName);
                                }
                            }
                        }
                    }

                    // Emit one result per synonym so Sonarr can match any known title.
                    foreach (var title in synonyms)
                    {
                        var releaseRows = seriesCq.Find(".torrent_group tr");

                        // Skip the first two info rows
                        for (int r = 2; r < releaseRows.Count(); r++)
                        {
                            var row = releaseRows.Get(r);
                            var rowCq = row.Cq();
                            if (rowCq.HasClass("edition_info"))
                            {
                                releaseInfo = rowCq.Find("td").Text();

                                if (string.IsNullOrWhiteSpace(releaseInfo))
                                {
                                    // Single episodes alpha - Reported that this info is missing.
                                    // It should self correct when availible
                                    break;
                                }

                                releaseInfo = releaseInfo.Replace("Episode ", "");
                                releaseInfo = releaseInfo.Replace("Season ", "S");
                                releaseInfo = releaseInfo.Trim();
                            }
                            else if (rowCq.HasClass("torrent"))
                            {
                                var links = rowCq.Find("a");
                                // Protect against format changes
                                if (links.Count() != 2)
                                {
                                    continue;
                                }

                                var release = new ReleaseInfo();
                                release.MinimumRatio = 1;
                                release.MinimumSeedTime = 259200;
                                var downloadLink = links.Get(0);
                                release.Guid = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href") + "&nh=" + Hash(title)); // Sonarr should dedupe on this url - allow a url per name.
                                release.Link = release.Guid;// We dont know this so try to fake based on the release year
                                release.PublishDate = new DateTime(year, 1, 1);
                                release.PublishDate = release.PublishDate.AddDays(Math.Min(DateTime.Now.DayOfYear, 365) - 1);

                                var infoLink = links.Get(1);
                                release.Comments = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href"));

                                // We dont actually have a release name >.> so try to create one
                                var releaseTags = infoLink.InnerText.Split("|".ToCharArray(), StringSplitOptions.RemoveEmptyEntries).ToList();
                                for (int i = releaseTags.Count - 1; i >= 0; i--)
                                {
                                    releaseTags[i] = releaseTags[i].Trim();
                                    if (string.IsNullOrWhiteSpace(releaseTags[i]))
                                        releaseTags.RemoveAt(i);
                                }

                                // The last tag is usually "Group (name)"; rewrite as "[name] ".
                                var group = releaseTags.Last();
                                if (group.Contains("(") && group.Contains(")"))
                                {
                                    // Skip raws if set
                                    if (group.ToLowerInvariant().StartsWith("raw") && !AllowRaws)
                                    {
                                        continue;
                                    }

                                    var start = group.IndexOf("(");
                                    group = "[" + group.Substring(start + 1, (group.IndexOf(")") - 1) - start) + "] ";
                                }
                                else
                                {
                                    group = string.Empty;
                                }

                                var infoString = "";

                                for (int i = 0; i + 1 < releaseTags.Count(); i++)
                                {
                                    infoString += "[" + releaseTags[i] + "]";
                                }

                                release.Title = string.Format("{0}{1} {2} {3}", group, title, releaseInfo, infoString);
                                release.Description = title;

                                var size = rowCq.Find(".torrent_size");
                                if (size.Count() > 0)
                                {
                                    var sizeParts = size.First().Text().Split(' ');
                                    // Parse with the invariant culture: the site always uses '.'
                                    // as the decimal separator regardless of the server's locale.
                                    release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0], CultureInfo.InvariantCulture));
                                }

                                // Additional 5 hours per GB
                                release.MinimumSeedTime += (release.Size / 1000000000) * 18000;

                                // Peer info
                                release.Seeders = int.Parse(rowCq.Find(".torrent_seeders").Text());
                                release.Peers = release.Seeders + int.Parse(rowCq.Find(".torrent_leechers").Text());

                                releases.Add(release);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // Surface the raw page to the error handler (if one is attached) so
                // parsing regressions can be diagnosed, then rethrow with `throw;`
                // to preserve the original stack trace.
                if (OnResultParsingError != null)
                    OnResultParsingError(this, responseContent, ex);
                throw;
            }

            // Add to the cache
            lock (cache)
            {
                cache.Add(new CachedResult(query.SearchTerm, releases));
            }

            return releases.Select(s => (ReleaseInfo)s.Clone()).ToArray();
        }

        public Task<byte[]> Download(Uri link)
        {
            return client.GetByteArrayAsync(link);
        }
    }
}
|
|
@ -149,7 +149,7 @@ namespace Jackett.Indexers
|
||||||
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], float.Parse(sizeStringParts[0]));
|
release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], float.Parse(sizeStringParts[0]));
|
||||||
|
|
||||||
release.Seeders = int.Parse(qRow.Find(".seeders").Text());
|
release.Seeders = int.Parse(qRow.Find(".seeders").Text());
|
||||||
release.Peers = int.Parse(qRow.Find(".leechers").Text());
|
release.Peers = release.Seeders + int.Parse(qRow.Find(".leechers").Text());
|
||||||
|
|
||||||
releases.Add(release);
|
releases.Add(release);
|
||||||
}
|
}
|
||||||
|
|
|
@ -81,8 +81,10 @@
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<Compile Include="ApiKey.cs" />
|
<Compile Include="ApiKey.cs" />
|
||||||
|
<Compile Include="CachedResult.cs" />
|
||||||
<Compile Include="ChannelInfo.cs" />
|
<Compile Include="ChannelInfo.cs" />
|
||||||
<Compile Include="ConfigurationData.cs" />
|
<Compile Include="ConfigurationData.cs" />
|
||||||
|
<Compile Include="ConfigurationDataBasicLoginAnimeBytes.cs" />
|
||||||
<Compile Include="ConfigurationDataBasicLogin.cs" />
|
<Compile Include="ConfigurationDataBasicLogin.cs" />
|
||||||
<Compile Include="ConfigurationDataUrl.cs" />
|
<Compile Include="ConfigurationDataUrl.cs" />
|
||||||
<Compile Include="CookieContainerExtensions.cs" />
|
<Compile Include="CookieContainerExtensions.cs" />
|
||||||
|
@ -102,6 +104,7 @@
|
||||||
<Compile Include="Indexers\Strike.cs" />
|
<Compile Include="Indexers\Strike.cs" />
|
||||||
<Compile Include="Indexers\ThePirateBay.cs" />
|
<Compile Include="Indexers\ThePirateBay.cs" />
|
||||||
<Compile Include="Indexers\TorrentDay.cs" />
|
<Compile Include="Indexers\TorrentDay.cs" />
|
||||||
|
<Compile Include="Indexers\AnimeBytes.cs" />
|
||||||
<Compile Include="Indexers\TorrentLeech.cs" />
|
<Compile Include="Indexers\TorrentLeech.cs" />
|
||||||
<Compile Include="Indexers\TorrentShack.cs" />
|
<Compile Include="Indexers\TorrentShack.cs" />
|
||||||
<Compile Include="Indexers\Torrentz.cs" />
|
<Compile Include="Indexers\Torrentz.cs" />
|
||||||
|
@ -148,8 +151,15 @@
|
||||||
</EmbeddedResource>
|
</EmbeddedResource>
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<Content Include="WebContent\custom.css" />
|
<Content Include="WebContent\custom.css">
|
||||||
<Content Include="WebContent\custom.js" />
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</Content>
|
||||||
|
<Content Include="WebContent\custom.js">
|
||||||
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</Content>
|
||||||
|
<Content Include="WebContent\logos\animebytes.png">
|
||||||
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</Content>
|
||||||
<Content Include="WebContent\logos\sceneaccess.png">
|
<Content Include="WebContent\logos\sceneaccess.png">
|
||||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
</Content>
|
</Content>
|
||||||
|
|
|
@ -7,7 +7,7 @@ using System.Threading.Tasks;
|
||||||
namespace Jackett
|
namespace Jackett
|
||||||
{
|
{
|
||||||
|
|
||||||
public class ReleaseInfo
|
public class ReleaseInfo: ICloneable
|
||||||
{
|
{
|
||||||
public string Title { get; set; }
|
public string Title { get; set; }
|
||||||
public Uri Guid { get; set; }
|
public Uri Guid { get; set; }
|
||||||
|
@ -28,6 +28,30 @@ namespace Jackett
|
||||||
public double? MinimumRatio { get; set; }
|
public double? MinimumRatio { get; set; }
|
||||||
public long? MinimumSeedTime { get; set; }
|
public long? MinimumSeedTime { get; set; }
|
||||||
|
|
||||||
|
public object Clone()
|
||||||
|
{
|
||||||
|
return new ReleaseInfo()
|
||||||
|
{
|
||||||
|
Title = Title,
|
||||||
|
Guid = Guid,
|
||||||
|
Link = Link,
|
||||||
|
Comments = Comments,
|
||||||
|
PublishDate = PublishDate,
|
||||||
|
Category = Category,
|
||||||
|
Size = Size,
|
||||||
|
Description = Description,
|
||||||
|
RageID = RageID,
|
||||||
|
Imdb = Imdb,
|
||||||
|
Seeders = Seeders,
|
||||||
|
Peers = Peers,
|
||||||
|
ConverUrl = ConverUrl,
|
||||||
|
BannerUrl = BannerUrl,
|
||||||
|
InfoHash = InfoHash,
|
||||||
|
MagnetUri = MagnetUri,
|
||||||
|
MinimumRatio = MinimumRatio,
|
||||||
|
MinimumSeedTime = MinimumSeedTime
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
public static long GetBytes(string unit, float value)
|
public static long GetBytes(string unit, float value)
|
||||||
{
|
{
|
||||||
|
|
|
@ -46,11 +46,24 @@ namespace Jackett
|
||||||
var q = new TorznabQuery();
|
var q = new TorznabQuery();
|
||||||
q.QueryType = query["t"];
|
q.QueryType = query["t"];
|
||||||
q.SearchTerm = query["q"];
|
q.SearchTerm = query["q"];
|
||||||
q.Categories = query["cat"].Split(',');
|
if (query["cat"] != null)
|
||||||
q.Extended = int.Parse(query["extended"]);
|
{
|
||||||
|
q.Categories = query["cat"].Split(',');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (query["extended"] != null)
|
||||||
|
{
|
||||||
|
q.Extended = int.Parse(query["extended"]);
|
||||||
|
}
|
||||||
q.ApiKey = query["apikey"];
|
q.ApiKey = query["apikey"];
|
||||||
q.Limit = int.Parse(query["limit"]);
|
if (query["limit"] != null)
|
||||||
q.Offset = int.Parse(query["offset"]);
|
{
|
||||||
|
q.Limit = int.Parse(query["limit"]);
|
||||||
|
}
|
||||||
|
if (query["offset"] != null)
|
||||||
|
{
|
||||||
|
q.Offset = int.Parse(query["offset"]);
|
||||||
|
}
|
||||||
|
|
||||||
int temp;
|
int temp;
|
||||||
if (int.TryParse(query["rid"], out temp))
|
if (int.TryParse(query["rid"], out temp))
|
||||||
|
|
|
@ -95,6 +95,11 @@
|
||||||
max-width: 260px;
|
max-width: 260px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.setup-item-inputbool input {
|
||||||
|
max-width: 100px;
|
||||||
|
height: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
.spinner {
|
.spinner {
|
||||||
-webkit-animation: spin 2s infinite linear;
|
-webkit-animation: spin 2s infinite linear;
|
||||||
-moz-animation: spin 2s infinite linear;
|
-moz-animation: spin 2s infinite linear;
|
||||||
|
|
|
@ -219,7 +219,7 @@ function getConfigModalJson(configForm) {
|
||||||
configJson[id] = $el.find(".setup-item-inputstring").val();
|
configJson[id] = $el.find(".setup-item-inputstring").val();
|
||||||
break;
|
break;
|
||||||
case "inputbool":
|
case "inputbool":
|
||||||
configJson[id] = $el.find(".setup-item-checkbox").val();
|
configJson[id] = $el.find(".setup-item-inputbool input").is(":checked");
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
|
@ -148,11 +148,11 @@
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<input class="setup-item-inputstring form-control" type="text" value="{{{value}}}" />
|
<input class="setup-item-inputstring form-control" type="text" value="{{{value}}}" />
|
||||||
<div class="setup-item-checkbox">
|
<div class="setup-item-inputbool">
|
||||||
{{#if value}}
|
{{#if value}}
|
||||||
<input type="checkbox" class="form-control" checked />
|
<input type="checkbox" data-id="{{id}}" class="form-control" checked />
|
||||||
{{else}}
|
{{else}}
|
||||||
<input type="checkbox" class="form-control" />
|
<input type="checkbox" data-id="{{id}}" class="form-control" />
|
||||||
{{/if}}
|
{{/if}}
|
||||||
</div>
|
</div>
|
||||||
<img class="setup-item-displayimage" src="{{{value}}}" />
|
<img class="setup-item-displayimage" src="{{{value}}}" />
|
||||||
|
|
Binary file not shown.
After Width: | Height: | Size: 3.8 KiB |
Loading…
Reference in New Issue