Merge branch 'master' into dotnetcore

flightlevel 2018-06-16 18:36:54 +10:00
commit a25eb7f951
16 changed files with 94 additions and 168 deletions

View File

@@ -241,7 +241,6 @@ Developer note: The software implements the [Torznab](https://github.com/Sonarr/
* SportsCult
* SportHD
* Superbits
* Synthesiz3r
* Tasmanit
* TBPlus
* TenYardTracker
@@ -272,7 +271,6 @@ Developer note: The software implements the [Torznab](https://github.com/Sonarr/
* TorrentSeeds
* Torrent-Syndikat
* TOrrent-tuRK (TORK)
* TorrentWTF
* TorViet
* ToTheGlory
* TranceTraffic

View File

@@ -72,7 +72,7 @@
rows:
selector: tr.browse_color, tr.freeleech_color, tr[id^="kdescr"]
after: 1
fields:
fields: # some users (rank specific?) have an extra column (td:nth-child(4)) with bookmark features
banner:
selector: a[href^="details.php?id="][onmouseover]
attribute: onmouseover
@@ -98,20 +98,20 @@
selector: a[href^="download.php"]
attribute: href
files:
selector: td:nth-child(4)
selector: a[href^="filelist.php"]
size:
selector: td:nth-child(7)
selector: td:nth-last-child(6)
grabs:
selector: td:nth-child(8)
selector: td:nth-last-child(5)
filters:
- name: regexp
args: ([\d,]+)
seeders:
selector: td:nth-child(9)
selector: td:nth-last-child(4)
leechers:
selector: td:nth-child(10)
selector: td:nth-last-child(3)
date:
selector: td:nth-child(6)
selector: td:nth-last-child(7)
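# nth-last-child counts from the end of the row, so these selectors stay correct whether or not the extra bookmark column is present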
downloadvolumefactor:
case:
"a.info > b:contains(\"[FREE]\")": "0"

View File

@@ -6,8 +6,9 @@
type: public
encoding: UTF-8
links:
- http://www.cpabien.cm/
- http://www.cpasbiens.cc/
legacylinks:
- http://www.cpabien.cm/
- http://cpabien.cm/
- http://cpasbiens1.com/
- http://cpabien.mx/

View File

@@ -6,8 +6,9 @@
type: private
encoding: UTF-8
links:
- http://shareisland.org/
- https://shareisland.org/
legacylinks:
- http://shareisland.org/
- http://www.shareisland.org/
caps:

View File

@@ -1,111 +0,0 @@
---
site: torrentwtf
name: Torrentwtf
description: "Torrentwtf is a Czech Private site for TV / MOVIES / GENERAL"
language: cs-cz
type: private
encoding: UTF-8
links:
- https://torrent.wtf/
caps:
categorymappings:
- {id: 1, cat: Movies, desc: "Filmy"}
- {id: 2, cat: TV, desc: "Seriály"}
- {id: 3, cat: Audio, desc: "Hudba"}
- {id: 5, cat: PC/Games, desc: "Hry"}
- {id: 6, cat: Books, desc: "Knihy"}
- {id: 8, cat: PC, desc: "Software"}
- {id: 9, cat: XXX, desc: "xXx"}
- {id: 10, cat: Other, desc: "Ostatní"}
modes:
search: [q]
tv-search: [q, season, ep, imdbid]
movie-search: [q, imdbid]
login:
path: /login
method: form
inputs:
username: "{{ .Config.username }}"
password: "{{ .Config.password }}"
error:
- selector: table.main:contains("Tieto poverenia sa nezhodujú s našimi záznamami.")
test:
path: /torrents
search:
paths:
- path: /filter
inputs:
$raw: "{{range .Categories}}categories[]={{.}}&{{end}}"
search: "{{if .Query.IMDBID}}{{else}}{{ .Keywords }}{{end}}"
imdb: "{{ .Query.IMDBIDShort }}"
tvdb: ""
tmdb: ""
sorting: created_at
direction: desc
qty: 100
preprocessingfilters:
- name: jsonjoinarray
args: ["$.result", ""]
- name: prepend
args: "<table>"
- name: append
args: "</table>"
rows:
selector: tr
fields:
category:
selector: a[href*="/categories/"]
attribute: href
filters:
- name: regexp
args: "/categories/.*?\\.(\\d+)"
title:
selector: a.view-torrent
filters:
- name: re_replace
args: [".*? / ", ""]
download:
selector: a[href*="/download_check/"]
attribute: href
filters:
- name: replace
args: ["/download_check/", "/download/"]
details:
selector: a.view-torrent
attribute: href
imdb:
optional: true
selector: a[href*="://www.imdb.com/title/"]
attribute: href
size:
selector: td:nth-child(5)
seeders:
selector: td:nth-child(7)
leechers:
selector: td:nth-child(8)
grabs:
selector: td:nth-child(6)
filters:
- name: regexp
args: ([\d\.]+)
date:
selector: time
attribute: datetime
filters:
- name: append
args: " +00:00"
- name: dateparse
args: "2006-01-02 15:04:05 -07:00"
downloadvolumefactor:
case:
"i[data-original-title=\"100% Free\"]": "0"
"i[data-original-title=\"Global FreeLeech\"]": "0"
"*": "1"
uploadvolumefactor:
case:
"i[data-original-title=\"Double upload\"]": "2"
"*": "1"

View File

@@ -120,7 +120,7 @@
selector: table#browsetable > tbody > tr:has(a[href^="/details.php?id="])
fields:
category:
selector: a[href^="/browse.php?q="]
selector: a[href^="/browse.php"]
attribute: href
filters:
- name: querystring

View File

@@ -283,5 +283,28 @@ namespace Jackett.Common.Indexers.Abstract
release.UploadVolumeFactor = 0;
}
}
public override async Task<byte[]> Download(Uri link)
{
var content = await base.Download(link);
// Check if we're out of FL tokens
// most Gazelle trackers will simply return the torrent anyway, but some (e.g. Redacted) return an error instead
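// (a valid .torrent file is bencoded and starts with the top-level dictionary marker 'd', which is what the check below relies on)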
var requestLink = link.ToString();
if (content.Length >= 1
&& content[0] != 'd' // simple test for torrent vs HTML content
&& requestLink.Contains("usetoken=1"))
{
var html = Encoding.GetString(content);
if (html.Contains("You do not have any freeleech tokens left."))
{
// download again with usetoken=0
var requestLinkNew = requestLink.Replace("usetoken=1", "usetoken=0");
content = await base.Download(new Uri(requestLinkNew));
}
}
return content;
}
}
}

View File

@@ -34,6 +34,9 @@ namespace Jackett.Common.Indexers
public string Type { get; protected set; }
public virtual string ID { get { return GetIndexerID(GetType()); } }
[JsonConverter(typeof(EncodingJsonConverter))]
public Encoding Encoding { get; protected set; }
public virtual bool IsConfigured { get; protected set; }
protected Logger logger;
protected IIndexerConfigurationService configurationService;
@@ -835,8 +838,6 @@ namespace Jackett.Common.Indexers
public override TorznabCapabilities TorznabCaps { get; protected set; }
[JsonConverter(typeof(EncodingJsonConverter))]
public Encoding Encoding { get; protected set; }
private List<CategoryMapping> categoryMapping = new List<CategoryMapping>();
protected WebClient webclient;

View File

@@ -17,7 +17,7 @@ namespace Jackett.Common.Indexers
{
public class BitCityReloaded : BaseWebIndexer
{
private string LoginUrl { get { return SiteLink + "login.php"; } }
private string LoginUrl { get { return SiteLink + "login/index.php"; } }
private string BrowseUrl { get { return SiteLink + "uebersicht.php"; } }
private TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "W. Europe Standard Time", "W. Europe Standard Time");

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
using Jackett.Common.Models;
using Jackett.Common.Models.IndexerConfig;
@@ -30,6 +31,7 @@ namespace Jackett.Common.Indexers
string Language { get; }
string LastError { get; set; }
string ID { get; }
Encoding Encoding { get; }
TorznabCapabilities TorznabCaps { get; }

View File

@@ -208,9 +208,37 @@ namespace Jackett.Common.Indexers
else
{
Match m = Regex.Match(release.Title, @""+ seasonep + @"\s?$", RegexOptions.IgnoreCase);
if (query.MatchQueryStringAND(release.Title, null, seasonep))
{
/* For Sonarr, if the search query was in English the title must be in English as well, so we need to adjust the Description and Title */
var temp = release.Title;
// releasedata: everything after Name.S0XE0X
String releasedata = release.Title.Split(new[] { seasonep }, StringSplitOptions.None)[1].Trim();
/* if the release name does not contain the language, add it, since it is known from the category */
if (cat.Contains("hun") && !releasedata.Contains("hun"))
releasedata += ".hun";
// the release description contains [imdb: ****] but we only need the part before it for the title
String[] description = {release.Description, ""};
if (release.Description.Contains("[imdb:"))
{
description = release.Description.Split('[');
description[1] = "[" + description[1];
}
else
release.Title = (description[0].Trim() + "." + seasonep.Trim() + "." + releasedata.Trim('.')).Replace(' ', '.');
// if the search was done for S0X then we don't want to put a . between S0X and E0X
Match match = Regex.Match(releasedata, @"^E\d\d?");
if (seasonep.Length==3 && match.Success)
release.Title = (description[0].Trim() + "." + seasonep.Trim() + releasedata.Trim('.')).Replace(' ', '.');
// add back the imdb rating to the description, e.g. [imdb: 8.7]
release.Description = temp + " " + description[1];
release.Description = release.Description.Trim();
releases.Add(release);
}
}

View File

@@ -1,36 +0,0 @@
using System.Collections.Generic;
using Jackett.Common.Indexers.Abstract;
using Jackett.Common.Models;
using Jackett.Common.Services.Interfaces;
using Jackett.Common.Utils.Clients;
using NLog;
namespace Jackett.Common.Indexers
{
public class Synthesiz3r : GazelleTracker
{
public Synthesiz3r(IIndexerConfigurationService configService, WebClient webClient, Logger logger, IProtectionService protectionService)
: base(name: "Synthesiz3r",
desc: "Synthesiz3r (ST3) is a Private Torrent Tracker for ELECTRONIC MUSIC",
link: "https://synthesiz3r.com/",
configService: configService,
logger: logger,
protectionService: protectionService,
webClient: webClient,
supportsFreeleechTokens: true
)
{
Language = "en-us";
Type = "private";
TorznabCaps.SupportedMusicSearchParamsList = new List<string>() { "q", "album", "artist", "label", "year" };
AddCategoryMapping(1, TorznabCatType.Audio, "Music");
AddCategoryMapping(2, TorznabCatType.PC, "Applications");
AddCategoryMapping(3, TorznabCatType.Books, "E-Books");
AddCategoryMapping(4, TorznabCatType.AudioAudiobook, "Audiobooks");
AddCategoryMapping(5, TorznabCatType.Movies, "E-Learning Videos");
AddCategoryMapping(6, TorznabCatType.TV, "Comedy");
AddCategoryMapping(7, TorznabCatType.Books, "Comics");
}
}
}

View File

@@ -13,6 +13,7 @@ using CloudFlareUtilities;
using Jackett.Common.Models.Config;
using Jackett.Common.Services.Interfaces;
using NLog;
using Jackett.Common.Helpers;
namespace Jackett.Common.Utils.Clients
{
@@ -257,7 +258,10 @@ namespace Jackett.Common.Utils.Clients
// See issue #1200
if (result.RedirectingTo != null && result.RedirectingTo.StartsWith("file://"))
{
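// under Mono, relative redirect targets can come back as file:// URIs (see issue #1200 above); rebuild an absolute URL from the original request's scheme and host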
var newRedirectingTo = result.RedirectingTo.Replace("file://", request.RequestUri.Scheme + "://" + request.RequestUri.Host);
// URL decoding is apparently needed too; without it e.g. the Demonoid download is broken
// TODO: is it always needed (not just for relative redirects)?
var newRedirectingTo = WebUtilityHelpers.UrlDecode(result.RedirectingTo, webRequest.Encoding);
newRedirectingTo = newRedirectingTo.Replace("file://", request.RequestUri.Scheme + "://" + request.RequestUri.Host);
logger.Debug("[MONO relative redirect bug] Rewriting relative redirect URL from " + result.RedirectingTo + " to " + newRedirectingTo);
result.RedirectingTo = newRedirectingTo;
}

View File

@@ -13,6 +13,7 @@ using CloudFlareUtilities;
using Jackett.Common.Models.Config;
using Jackett.Common.Services.Interfaces;
using NLog;
using Jackett.Common.Helpers;
namespace Jackett.Common.Utils.Clients
{
@@ -277,7 +278,10 @@ namespace Jackett.Common.Utils.Clients
// See issue #1200
if (result.RedirectingTo != null && result.RedirectingTo.StartsWith("file://"))
{
var newRedirectingTo = result.RedirectingTo.Replace("file://", request.RequestUri.Scheme + "://" + request.RequestUri.Host);
// URL decoding is apparently needed too; without it e.g. the Demonoid download is broken
// TODO: is it always needed (not just for relative redirects)?
var newRedirectingTo = WebUtilityHelpers.UrlDecode(result.RedirectingTo, webRequest.Encoding);
newRedirectingTo = newRedirectingTo.Replace("file://", request.RequestUri.Scheme + "://" + request.RequestUri.Host);
logger.Debug("[MONO relative redirect bug] Rewriting relative redirect URL from " + result.RedirectingTo + " to " + newRedirectingTo);
result.RedirectingTo = newRedirectingTo;
}

View File

@@ -206,6 +206,7 @@ namespace Jackett.Updater
"Definitions/rockhardlossless.yml",
"Definitions/oxtorrent.yml",
"Definitions/tehconnection.yml",
"Definitions/torrentwtf.yml",
};
foreach (var oldFIle in oldFiles)

View File

@@ -71,9 +71,19 @@ namespace Jackett.Controllers
}
// This will fix torrents where the keys are not sorted, which are therefore not supported by Sonarr.
var parser = new BencodeParser();
var torrentDictionary = parser.Parse(downloadBytes);
byte[] sortedDownloadBytes = torrentDictionary.EncodeAsBytes();
byte[] sortedDownloadBytes = null;
try
{
var parser = new BencodeParser();
var torrentDictionary = parser.Parse(downloadBytes);
sortedDownloadBytes = torrentDictionary.EncodeAsBytes();
}
catch (Exception e)
{
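// the response wasn't parseable as bencode, so it's probably an error page; log the decoded body to show what the tracker actually returned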
var content = indexer.Encoding.GetString(downloadBytes);
logger.Error(content);
throw new Exception("BencodeParser failed", e);
}
var result = new HttpResponseMessage(HttpStatusCode.OK);
result.Content = new ByteArrayContent(sortedDownloadBytes);