diff --git a/src/Jackett/CachedResult.cs b/src/Jackett/CachedResult.cs
new file mode 100644
index 000000000..925d9a139
--- /dev/null
+++ b/src/Jackett/CachedResult.cs
@@ -0,0 +1,36 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Jackett
+{
+    public class CachedResult
+    {
+        private List<ReleaseInfo> results;
+        private DateTime created;
+        private string query;
+
+        public CachedResult(string query, List<ReleaseInfo> results){
+            this.results = results;
+            created = DateTime.Now;
+            this.query = query;
+        }
+
+        public IReadOnlyList<ReleaseInfo> Results
+        {
+            get { return results.AsReadOnly(); }
+        }
+
+        public DateTime Created
+        {
+            get { return created; }
+        }
+
+        public string Query
+        {
+            get { return query; }
+        }
+    }
+}
diff --git a/src/Jackett/ConfigurationDataBasicLoginAnimeBytes.cs b/src/Jackett/ConfigurationDataBasicLoginAnimeBytes.cs
new file mode 100644
index 000000000..bd5e9ab0a
--- /dev/null
+++ b/src/Jackett/ConfigurationDataBasicLoginAnimeBytes.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Jackett
+{
+    public class ConfigurationDataBasicLoginAnimeBytes : ConfigurationDataBasicLogin
+    {
+        public BoolItem IncludeRaw { get; private set; }
+        public DisplayItem RageIdWarning { get; private set; }
+        public DisplayItem DateWarning { get; private set; }
+
+        public ConfigurationDataBasicLoginAnimeBytes(): base()
+        {
+            IncludeRaw = new BoolItem() { Name = "IncludeRaw", Value = false };
+            RageIdWarning = new DisplayItem("Ensure rageid lookup is disabled in Sonarr for this tracker.") { Name = "RageWarning" };
+            DateWarning = new DisplayItem("This tracker does not supply upload dates, so they are based off the year of release.") { Name = "DateWarning" };
+        }
+
+        public override Item[] GetItems()
+        {
+            return new Item[] { Username, Password, IncludeRaw, RageIdWarning, DateWarning };
+        }
+    }
+}
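(Aside, not part of the diff: a minimal sketch of how CachedResult is meant to back the static, nine-minute query cache that AnimeBytes.cs wires up below. CachedResult and ReleaseInfo are the types added/changed in this PR; the CacheDemo wrapper, the "using Jackett;" import and the flow are illustrative only, and the expiry test matches the corrected DateTime.Now - Created comparison in CleanCache.)

using System;
using System.Collections.Generic;
using System.Linq;
using Jackett;   // assumption: CachedResult and ReleaseInfo live in the Jackett namespace, as in this PR

class CacheDemo
{
    static readonly TimeSpan cacheTime = new TimeSpan(0, 9, 0);          // same nine-minute window as AnimeBytes
    static readonly List<CachedResult> cache = new List<CachedResult>();

    static ReleaseInfo[] Lookup(string searchTerm)
    {
        lock (cache)
        {
            // Drop entries older than the window, then look for an exact query match.
            cache.RemoveAll(i => DateTime.Now - i.Created > cacheTime);
            var hit = cache.FirstOrDefault(i => i.Query == searchTerm);
            // Hand back clones so callers cannot mutate what stays in the cache.
            return hit == null ? null : hit.Results.Select(r => (ReleaseInfo)r.Clone()).ToArray();
        }
    }

    static void Store(string searchTerm, List<ReleaseInfo> releases)
    {
        lock (cache)
        {
            cache.Add(new CachedResult(searchTerm, releases));
        }
    }
}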
diff --git a/src/Jackett/Indexers/AnimeBytes.cs b/src/Jackett/Indexers/AnimeBytes.cs
new file mode 100644
index 000000000..e368bb487
--- /dev/null
+++ b/src/Jackett/Indexers/AnimeBytes.cs
@@ -0,0 +1,387 @@
+using CsQuery;
+using Newtonsoft.Json.Linq;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.RegularExpressions;
+using System.Threading.Tasks;
+using System.Web;
+
+namespace Jackett.Indexers
+{
+    public class AnimeBytes : IndexerInterface
+    {
+        private static List<CachedResult> cache = new List<CachedResult>();
+        private static readonly TimeSpan cacheTime = new TimeSpan(0, 9, 0);
+
+        public event Action<IndexerInterface, string, Exception> OnResultParsingError;
+        public event Action<IndexerInterface, JToken> OnSaveConfigurationRequested;
+
+        static string chromeUserAgent = "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36";
+
+        public string DisplayName
+        {
+            get { return "AnimeBytes"; }
+        }
+
+        public string DisplayDescription
+        {
+            get { return "The web's best Chinese cartoons"; }
+        }
+
+        public Uri SiteLink
+        {
+            get { return new Uri(BaseUrl); }
+        }
+
+        const string BaseUrl = "https://animebytes.tv";
+        const string LoginUrl = BaseUrl + "/user/login";
+        const string SearchUrl = BaseUrl + "/torrents.php?filter_cat[1]=1";
+
+        public bool IsConfigured { get; private set; }
+        public bool AllowRaws { get; private set; }
+
+        CookieContainer cookieContainer;
+        HttpClientHandler handler;
+        HttpClient client;
+
+        public AnimeBytes()
+        {
+            IsConfigured = false;
+            cookieContainer = new CookieContainer();
+            handler = new HttpClientHandler
+            {
+                CookieContainer = cookieContainer,
+                AllowAutoRedirect = false,
+                UseCookies = true,
+            };
+            client = new HttpClient(handler);
+            client.DefaultRequestHeaders.Add("User-Agent", chromeUserAgent);
+        }
+
+        public Task<ConfigurationData> GetConfigurationForSetup()
+        {
+            var config = new ConfigurationDataBasicLoginAnimeBytes();
+            return Task.FromResult<ConfigurationData>(config);
+        }
+
+        public async Task ApplyConfiguration(JToken configJson)
+        {
+            var config = new ConfigurationDataBasicLoginAnimeBytes();
+            config.LoadValuesFromJson(configJson);
+
+            // Get the login form, as we need the CSRF token
+            var loginPage = await client.GetAsync(LoginUrl);
+            CQ loginPageDom = await loginPage.Content.ReadAsStringAsync();
+            var csrfToken = loginPageDom["input[name=\"csrf_token\"]"].Last();
+
+            // Build the login form
+            var pairs = new Dictionary<string, string> {
+                { "csrf_token", csrfToken.Attr("value") },
+                { "username", config.Username.Value },
+                { "password", config.Password.Value },
+                { "keeplogged_sent", "true" },
+                { "keeplogged", "on" },
+                { "login", "Log In!" }
+            };
+
+            var content = new FormUrlEncodedContent(pairs);
+
+            // Do the login
+            var response = await client.PostAsync(LoginUrl, content);
+            var responseContent = await response.Content.ReadAsStringAsync();
+
+            // Compatibility issue between the cookie format and HttpClient:
+            // pull the cookie out manually, ignoring the expiry date, then set it ourselves.
+            // http://stackoverflow.com/questions/14681144/httpclient-not-storing-cookies-in-cookiecontainer
+            IEnumerable<string> cookies;
+            if (response.Headers.TryGetValues("set-cookie", out cookies))
+            {
+                foreach (var c in cookies)
+                {
+                    cookieContainer.SetCookies(new Uri(BaseUrl), c.Substring(0, c.LastIndexOf(';')));
+                }
+            }
+
+            foreach (Cookie cookie in cookieContainer.GetCookies(new Uri(BaseUrl)))
+            {
+                if (cookie.Name == "session")
+                {
+                    cookie.Expires = DateTime.Now.AddDays(360);
+                    break;
+                }
+            }
+
+            // Get the home page now that we are logged in; AllowAutoRedirect is false because we needed to grab the cookie manually.
+            response = await client.GetAsync(BaseUrl);
+            responseContent = await response.Content.ReadAsStringAsync();
+
+            if (!responseContent.Contains("/user/logout"))
+            {
+                throw new ExceptionWithConfigData("Failed to login, 6 failed attempts will get you banned for 6 hours.", (ConfigurationData)config);
+            }
+            else
+            {
+                AllowRaws = config.IncludeRaw.Value;
+                var configSaveData = new JObject();
+                configSaveData["cookies"] = cookieContainer.ToJson(SiteLink);
+                configSaveData["raws"] = AllowRaws;
+
+                if (OnSaveConfigurationRequested != null)
+                    OnSaveConfigurationRequested(this, configSaveData);
+
+                IsConfigured = true;
+            }
+        }
+
+        public void LoadFromSavedConfiguration(JToken jsonConfig)
+        {
+            cookieContainer.FillFromJson(new Uri(BaseUrl), (JArray)jsonConfig["cookies"]);
+            IsConfigured = true;
+            AllowRaws = jsonConfig["raws"].Value<bool>();
+        }
+
+        private string Hash(string input)
+        {
+            // Use the input string to calculate an MD5 hash
+            MD5 md5 = System.Security.Cryptography.MD5.Create();
+            byte[] inputBytes = System.Text.Encoding.ASCII.GetBytes(input);
+            byte[] hashBytes = md5.ComputeHash(inputBytes);
+
+            // Convert the byte array to a hexadecimal string
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < hashBytes.Length; i++)
+            {
+                sb.Append(hashBytes[i].ToString("X2"));
+            }
+            return sb.ToString();
+        }
+
+        private void CleanCache()
+        {
+            foreach (var expired in cache.Where(i => DateTime.Now - i.Created > cacheTime).ToList())
+            {
+                cache.Remove(expired);
+            }
+        }
+
+        public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
+        {
+            // This tracker only deals with full seasons, so chop off the episode/season number if we have it D:
+            if (!string.IsNullOrWhiteSpace(query.SearchTerm))
+            {
+                var splitindex = query.SearchTerm.LastIndexOf(' ');
+                if (splitindex > -1)
+                    query.SearchTerm = query.SearchTerm.Substring(0, splitindex);
+            }
+
+            // The result list
+            var releases = new List<ReleaseInfo>();
+
+            // Check the cache first so we don't query the server once per episode when searching for every episode in a series.
+            lock (cache)
+            {
+                // Remove old cache items
+                CleanCache();
+
+                var cachedResult = cache.Where(i => i.Query == query.SearchTerm).FirstOrDefault();
+                if (cachedResult != null)
+                    return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
+            }
+
+            var queryUrl = SearchUrl;
+            // Only include the query string when it is required, as hopefully the site caches the non-query page
+            if (!string.IsNullOrWhiteSpace(query.SearchTerm))
+            {
+                queryUrl += "&action=advanced&search_type=title&sort=time_added&way=desc&anime%5Btv_series%5D=1&searchstr=" + WebUtility.UrlEncode(query.SearchTerm);
+            }
+
+            // Get the content from the tracker
+            var response = await client.GetAsync(queryUrl);
+            var responseContent = await response.Content.ReadAsStringAsync();
+            CQ dom = responseContent;
+
+            // Parse
+            try
+            {
+                var releaseInfo = "S01";
+                var root = dom.Find(".anime");
+                // We may have been redirected to the series page if we have none of these
+                if (root.Count() == 0)
+                    root = dom.Find(".torrent_table");
+
+                foreach (var series in root)
+                {
+                    var seriesCq = series.Cq();
+
+                    var synonyms = new List<string>();
+                    var mainTitle = seriesCq.Find(".group_title strong a").First().Text().Trim();
+
+                    var yearStr = seriesCq.Find(".group_title strong").First().Text().Trim().Replace("]", "").Trim();
+                    int yearIndex = yearStr.LastIndexOf("[");
+                    if (yearIndex > -1)
+                        yearStr = yearStr.Substring(yearIndex + 1);
+
+                    int year = 0;
+                    if (!int.TryParse(yearStr, out year))
+                        year = DateTime.Now.Year;
+
+                    synonyms.Add(mainTitle);
+
+                    // If the title contains a comma then we can't use the synonyms, as they are comma separated
+                    if (!mainTitle.Contains(","))
+                    {
+                        var synonymNames = string.Empty;
+                        foreach (var e in seriesCq.Find(".group_statbox li"))
+                        {
+                            if (e.FirstChild.InnerText == "Synonyms:")
+                            {
+                                synonymNames = e.InnerText;
+                            }
+                        }
+
+                        if (!string.IsNullOrWhiteSpace(synonymNames))
+                        {
+                            foreach (var name in synonymNames.Split(",".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))
+                            {
+                                var theName = name.Trim();
+                                if (!theName.Contains("&#") && !string.IsNullOrWhiteSpace(theName))
+                                {
+                                    synonyms.Add(theName);
+                                }
+                            }
+                        }
+                    }
+
+                    foreach (var title in synonyms)
+                    {
+                        var releaseRows = seriesCq.Find(".torrent_group tr");
+
+                        // Skip the first two info rows
+                        for (int r = 2; r < releaseRows.Count(); r++)
+                        {
+                            var row = releaseRows.Get(r);
+                            var rowCq = row.Cq();
+                            if (rowCq.HasClass("edition_info"))
+                            {
+                                releaseInfo = rowCq.Find("td").Text();
+
+                                if (string.IsNullOrWhiteSpace(releaseInfo))
+                                {
+                                    // Single episodes alpha - reported that this info can be missing.
+                                    // It should self-correct when available
+                                    break;
+                                }
+
+                                releaseInfo = releaseInfo.Replace("Episode ", "");
+                                releaseInfo = releaseInfo.Replace("Season ", "S");
+                                releaseInfo = releaseInfo.Trim();
+                            }
+                            else if (rowCq.HasClass("torrent"))
+                            {
+                                var links = rowCq.Find("a");
+                                // Protect against format changes
+                                if (links.Count() != 2)
+                                {
+                                    continue;
+                                }
+
+                                var release = new ReleaseInfo();
+                                release.MinimumRatio = 1;
+                                release.MinimumSeedTime = 259200;
+                                var downloadLink = links.Get(0);
+                                release.Guid = new Uri(BaseUrl + "/" + downloadLink.Attributes.GetAttribute("href") + "&nh=" + Hash(title)); // Sonarr should dedupe on this URL - allow one URL per name.
+                                release.Link = release.Guid; // We don't know the real link, so fake it based on the release year
+                                release.PublishDate = new DateTime(year, 1, 1);
+                                release.PublishDate = release.PublishDate.AddDays(Math.Min(DateTime.Now.DayOfYear, 365) - 1);
+
+                                var infoLink = links.Get(1);
+                                release.Comments = new Uri(BaseUrl + "/" + infoLink.Attributes.GetAttribute("href"));
+
+                                // We don't actually have a release name >.> so try to create one
+                                var releaseTags = infoLink.InnerText.Split("|".ToCharArray(), StringSplitOptions.RemoveEmptyEntries).ToList();
+                                for (int i = releaseTags.Count - 1; i >= 0; i--)
+                                {
+                                    releaseTags[i] = releaseTags[i].Trim();
+                                    if (string.IsNullOrWhiteSpace(releaseTags[i]))
+                                        releaseTags.RemoveAt(i);
+                                }
+
+                                var group = releaseTags.Last();
+                                if (group.Contains("(") && group.Contains(")"))
+                                {
+                                    // Skip raws if set
+                                    if (group.ToLowerInvariant().StartsWith("raw") && !AllowRaws)
+                                    {
+                                        continue;
+                                    }
+
+                                    var start = group.IndexOf("(");
+                                    group = "[" + group.Substring(start + 1, (group.IndexOf(")") - 1) - start) + "] ";
+                                }
+                                else
+                                {
+                                    group = string.Empty;
+                                }
+
+                                var infoString = "";
+
+                                for (int i = 0; i + 1 < releaseTags.Count(); i++)
+                                {
+                                    infoString += "[" + releaseTags[i] + "]";
+                                }
+
+                                release.Title = string.Format("{0}{1} {2} {3}", group, title, releaseInfo, infoString);
+                                release.Description = title;
+
+                                var size = rowCq.Find(".torrent_size");
+                                if (size.Count() > 0)
+                                {
+                                    var sizeParts = size.First().Text().Split(' ');
+                                    release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0]));
+                                }
+
+                                // An additional 5 hours of seed time per GB
+                                release.MinimumSeedTime += (release.Size / 1000000000) * 18000;
+
+                                // Peer info
+                                release.Seeders = int.Parse(rowCq.Find(".torrent_seeders").Text());
+                                release.Peers = release.Seeders + int.Parse(rowCq.Find(".torrent_leechers").Text());
+
+                                releases.Add(release);
+                            }
+                        }
+                    }
+                }
+            }
+            catch (Exception ex)
+            {
+                OnResultParsingError(this, responseContent, ex);
+                throw;
+            }
+
+            // Add to the cache
+            lock (cache)
+            {
+                cache.Add(new CachedResult(query.SearchTerm, releases));
+            }
+
+            return releases.Select(s => (ReleaseInfo)s.Clone()).ToArray();
+        }
+
+        public Task<byte[]> Download(Uri link)
+        {
+            return client.GetByteArrayAsync(link);
+        }
+    }
+}
diff --git a/src/Jackett/Indexers/TorrentLeech.cs b/src/Jackett/Indexers/TorrentLeech.cs
index 617666fa4..2e8610abf 100644
--- a/src/Jackett/Indexers/TorrentLeech.cs
+++ b/src/Jackett/Indexers/TorrentLeech.cs
@@ -149,7 +149,7 @@ namespace Jackett.Indexers
                 release.Size = ReleaseInfo.GetBytes(sizeStringParts[1], float.Parse(sizeStringParts[0]));
 
                 release.Seeders = int.Parse(qRow.Find(".seeders").Text());
-                release.Peers = int.Parse(qRow.Find(".leechers").Text());
+                release.Peers = release.Seeders + int.Parse(qRow.Find(".leechers").Text());
 
                 releases.Add(release);
             }
diff --git a/src/Jackett/Jackett.csproj b/src/Jackett/Jackett.csproj
index 342743671..dd29577c3 100644
--- a/src/Jackett/Jackett.csproj
+++ b/src/Jackett/Jackett.csproj
@@ -81,8 +81,10 @@
+    <Compile Include="CachedResult.cs" />
+    <Compile Include="ConfigurationDataBasicLoginAnimeBytes.cs" />
@@ -102,6 +104,7 @@
+    <Compile Include="Indexers\AnimeBytes.cs" />
@@ -148,8 +151,15 @@
-
-
+
+      PreserveNewest
+
+
+      PreserveNewest
+
+
+      PreserveNewest
+
       PreserveNewest
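(Aside, not part of the diff: a small self-contained example of the release-title synthesis performed in AnimeBytes.PerformQuery above. Since the tracker exposes no real release names, the series title and tag values below are invented sample data.)

using System;
using System.Collections.Generic;
using System.Linq;

class TitleSketch
{
    static void Main()
    {
        // Sample values only - a synthesised name is built from the series title, the edition info and the info-link tags.
        var title = "Fullmetal Alchemist Brotherhood";
        var releaseInfo = "S01";
        var releaseTags = new List<string> { "BD", "720p", "h264", "FLAC", "Softsubs (Commie)" };

        var group = releaseTags.Last();                  // the last tag holds the group, e.g. "Softsubs (Commie)"
        var start = group.IndexOf("(");
        group = "[" + group.Substring(start + 1, (group.IndexOf(")") - 1) - start) + "] ";

        var infoString = "";
        for (int i = 0; i + 1 < releaseTags.Count; i++)  // every tag except the group
            infoString += "[" + releaseTags[i] + "]";

        // Prints: [Commie] Fullmetal Alchemist Brotherhood S01 [BD][720p][h264][FLAC]
        Console.WriteLine(string.Format("{0}{1} {2} {3}", group, title, releaseInfo, infoString));
    }
}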
diff --git a/src/Jackett/ReleaseInfo.cs b/src/Jackett/ReleaseInfo.cs
index da77ce505..a6845102a 100644
--- a/src/Jackett/ReleaseInfo.cs
+++ b/src/Jackett/ReleaseInfo.cs
@@ -7,7 +7,7 @@ using System.Threading.Tasks;
 
 namespace Jackett
 {
-    public class ReleaseInfo
+    public class ReleaseInfo : ICloneable
     {
         public string Title { get; set; }
         public Uri Guid { get; set; }
@@ -28,6 +28,30 @@
         public double? MinimumRatio { get; set; }
         public long? MinimumSeedTime { get; set; }
 
+        public object Clone()
+        {
+            return new ReleaseInfo()
+            {
+                Title = Title,
+                Guid = Guid,
+                Link = Link,
+                Comments = Comments,
+                PublishDate = PublishDate,
+                Category = Category,
+                Size = Size,
+                Description = Description,
+                RageID = RageID,
+                Imdb = Imdb,
+                Seeders = Seeders,
+                Peers = Peers,
+                ConverUrl = ConverUrl,
+                BannerUrl = BannerUrl,
+                InfoHash = InfoHash,
+                MagnetUri = MagnetUri,
+                MinimumRatio = MinimumRatio,
+                MinimumSeedTime = MinimumSeedTime
+            };
+        }
 
         public static long GetBytes(string unit, float value)
         {
diff --git a/src/Jackett/TorznabQuery.cs b/src/Jackett/TorznabQuery.cs
index 1e114ce1c..e2f2a20f9 100644
--- a/src/Jackett/TorznabQuery.cs
+++ b/src/Jackett/TorznabQuery.cs
@@ -46,11 +46,24 @@ namespace Jackett
             var q = new TorznabQuery();
             q.QueryType = query["t"];
             q.SearchTerm = query["q"];
-            q.Categories = query["cat"].Split(',');
-            q.Extended = int.Parse(query["extended"]);
+            if (query["cat"] != null)
+            {
+                q.Categories = query["cat"].Split(',');
+            }
+
+            if (query["extended"] != null)
+            {
+                q.Extended = int.Parse(query["extended"]);
+            }
             q.ApiKey = query["apikey"];
-            q.Limit = int.Parse(query["limit"]);
-            q.Offset = int.Parse(query["offset"]);
+            if (query["limit"] != null)
+            {
+                q.Limit = int.Parse(query["limit"]);
+            }
+            if (query["offset"] != null)
+            {
+                q.Offset = int.Parse(query["offset"]);
+            }
 
             int temp;
             if (int.TryParse(query["rid"], out temp))
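(Aside, not part of the diff: what the new null guards in TorznabQuery change in practice. The sketch assumes the incoming query collection behaves like a NameValueCollection, where a missing key returns null; the actual collection type is outside this hunk, and the query string values are made-up samples.)

using System;
using System.Collections.Specialized;
using System.Web;

class TorznabParseSketch
{
    static void Main()
    {
        // A request that omits cat/extended/limit/offset entirely, as Sonarr's capability checks can.
        NameValueCollection query = HttpUtility.ParseQueryString("t=tvsearch&q=naruto&apikey=abc123");

        // Before this change, int.Parse(query["limit"]) threw ArgumentNullException for such requests.
        // With the null guards, absent parameters simply keep their defaults.
        if (query["limit"] != null)
        {
            Console.WriteLine("limit=" + int.Parse(query["limit"]));
        }
        else
        {
            Console.WriteLine("limit not supplied - keeping the default");
        }
    }
}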
diff --git a/src/Jackett/WebContent/custom.css b/src/Jackett/WebContent/custom.css
index 1fa5cddc4..d52dffdbb 100644
--- a/src/Jackett/WebContent/custom.css
+++ b/src/Jackett/WebContent/custom.css
@@ -95,6 +95,11 @@
         max-width: 260px;
     }
 
+    .setup-item-inputbool input {
+        max-width: 100px;
+        height: 20px;
+    }
+
     .spinner {
         -webkit-animation: spin 2s infinite linear;
         -moz-animation: spin 2s infinite linear;
diff --git a/src/Jackett/WebContent/custom.js b/src/Jackett/WebContent/custom.js
index 6ca36f85f..4499ce97c 100644
--- a/src/Jackett/WebContent/custom.js
+++ b/src/Jackett/WebContent/custom.js
@@ -219,7 +219,7 @@ function getConfigModalJson(configForm) {
                 configJson[id] = $el.find(".setup-item-inputstring").val();
                 break;
             case "inputbool":
-                configJson[id] = $el.find(".setup-item-checkbox").val();
+                configJson[id] = $el.find(".setup-item-inputbool input").is(":checked");
                 break;
         }
     });
diff --git a/src/Jackett/WebContent/index.html b/src/Jackett/WebContent/index.html
index 45c5af196..3bb738a0f 100644
--- a/src/Jackett/WebContent/index.html
+++ b/src/Jackett/WebContent/index.html
@@ -148,11 +148,11 @@
-
+
                 {{#if value}}
-
+
                 {{else}}
-
+
                 {{/if}}
diff --git a/src/Jackett/WebContent/logos/animebytes.png b/src/Jackett/WebContent/logos/animebytes.png
new file mode 100644
index 000000000..7fcf04736
Binary files /dev/null and b/src/Jackett/WebContent/logos/animebytes.png differ
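(Aside, not part of the diff: why ReleaseInfo now implements ICloneable. PerformQuery hands out clones of cached releases, so callers cannot mutate entries that live in the static cache. The snippet is illustrative only and assumes a "using Jackett;" import plus the property types implied by the Clone method above.)

using System;
using System.Collections.Generic;
using Jackett;   // assumption: ReleaseInfo lives in the Jackett namespace, as in this PR

class CloneSketch
{
    static void Main()
    {
        // A release that would normally sit inside the indexer's static cache.
        var cached = new ReleaseInfo { Title = "[Commie] Example S01 [BD][720p]", Seeders = 10 };
        var cache = new List<ReleaseInfo> { cached };

        // A caller receives a copy...
        var copy = (ReleaseInfo)cache[0].Clone();
        copy.Title += " [fetched]";

        // ...so the cached entry is untouched and later queries still see the original title.
        Console.WriteLine(cache[0].Title);   // still "[Commie] Example S01 [BD][720p]"
    }
}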