using System ;
using System.Collections.Generic ;
using System.Collections.Specialized ;
using System.Globalization ;
using System.IO ;
using System.Linq ;
using System.Net ;
using System.Reflection ;
using System.Text ;
using System.Text.RegularExpressions ;
using System.Threading.Tasks ;
using CsQuery ;
using Jackett.Common.Models ;
using Jackett.Common.Models.IndexerConfig.Bespoke ;
using Jackett.Common.Services.Interfaces ;
using Jackett.Common.Utils ;
using Jackett.Common.Utils.Clients ;
using Newtonsoft.Json ;
using Newtonsoft.Json.Linq ;
using NLog ;
namespace Jackett.Common.Indexers
{
/// <summary>
/// Provider for Abnormal Private French Tracker
/// gazelle based but the ajax.php API seems to be broken (always returning failure)
/// </summary>
public class Abnormal : BaseCachingWebIndexer
{
// Tracker endpoints, derived from the configured SiteLink
private string LoginUrl => SiteLink + "login.php";
private string SearchUrl => SiteLink + "torrents.php";
private string TorrentCommentUrl => TorrentDescriptionUrl;
private string TorrentDescriptionUrl => SiteLink + "torrents.php?id=";
private string TorrentDownloadUrl => SiteLink + "torrents.php?action=download&id={id}&authkey={auth_key}&torrent_pass={torrent_pass}";

// Convenience accessors over the user-supplied configuration
private string ReplaceMulti => ConfigData.ReplaceMulti.Value;
private bool Latency => ConfigData.Latency.Value;
private bool DevMode => ConfigData.DevMode.Value;
private bool CacheMode => ConfigData.HardDriveCache.Value;

// Per-indexer temp folder used by the DEV-mode hard drive cache
// (e.g. %TEMP%/jackett.common/abnormal)
private static string Directory => Path.Combine(Path.GetTempPath(), Assembly.GetExecutingAssembly().GetName().Name.ToLower(), MethodBase.GetCurrentMethod().DeclaringType?.Name.ToLower());

// Headers sent with every request when browser emulation is enabled
private Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();

// DOM (CsQuery) of the most recently fetched results page
private CQ fDom = null;

// Strongly-typed view over the base class configData
private ConfigurationDataAbnormal ConfigData
{
    get { return (ConfigurationDataAbnormal)configData; }
    set { base.configData = value; }
}
/// <summary>
/// Build the Abnormal indexer: registers tracker metadata and maps the
/// tracker's own category identifiers onto Torznab categories.
/// </summary>
public Abnormal(IIndexerConfigurationService configService, Utils.Clients.WebClient w, Logger l, IProtectionService ps)
    : base(
        name: "Abnormal",
        description: "General French Private Tracker",
        link: "https://abnormal.ws/",
        caps: new TorznabCapabilities(),
        configService: configService,
        client: w,
        logger: l,
        p: ps,
        downloadBase: "https://abnormal.ws/torrents.php?action=download&id=",
        configData: new ConfigurationDataAbnormal())
{
    Language = "fr-fr";
    Encoding = Encoding.UTF8;
    Type = "private";

    // Start from a clean slate before registering this tracker's categories
    TorznabCaps.Categories.Clear();

    // Movies
    AddCategoryMapping("MOVIE|DVDR", TorznabCatType.MoviesDVD);        // DVDR
    AddCategoryMapping("MOVIE|DVDRIP", TorznabCatType.MoviesSD);       // DVDRIP
    AddCategoryMapping("MOVIE|BDRIP", TorznabCatType.MoviesSD);        // BDRIP
    AddCategoryMapping("MOVIE|VOSTFR", TorznabCatType.MoviesOther);    // VOSTFR
    AddCategoryMapping("MOVIE|HD|720p", TorznabCatType.MoviesHD);      // HD 720P
    AddCategoryMapping("MOVIE|HD|1080p", TorznabCatType.MoviesHD);     // HD 1080P
    AddCategoryMapping("MOVIE|REMUXBR", TorznabCatType.MoviesBluRay);  // REMUX BLURAY
    AddCategoryMapping("MOVIE|FULLBR", TorznabCatType.MoviesBluRay);   // FULL BLURAY

    // Series
    AddCategoryMapping("TV|SD|VOSTFR", TorznabCatType.TV);             // SD VOSTFR
    AddCategoryMapping("TV|HD|VOSTFR", TorznabCatType.TVHD);           // HD VOSTFR
    AddCategoryMapping("TV|SD|VF", TorznabCatType.TVSD);               // SD VF
    AddCategoryMapping("TV|HD|VF", TorznabCatType.TVHD);               // HD VF
    AddCategoryMapping("TV|PACK|FR", TorznabCatType.TVOTHER);          // PACK FR
    AddCategoryMapping("TV|PACK|VOSTFR", TorznabCatType.TVOTHER);      // PACK VOSTFR
    AddCategoryMapping("TV|EMISSIONS", TorznabCatType.TVOTHER);        // EMISSIONS

    // Anime
    AddCategoryMapping("ANIME", TorznabCatType.TVAnime);

    // Documentaries
    AddCategoryMapping("DOCS", TorznabCatType.TVDocumentary);

    // Music
    AddCategoryMapping("MUSIC|FLAC", TorznabCatType.AudioLossless);
    AddCategoryMapping("MUSIC|MP3", TorznabCatType.AudioMP3);
    AddCategoryMapping("MUSIC|CONCERT", TorznabCatType.AudioVideo);

    // Other
    AddCategoryMapping("PC|APP", TorznabCatType.PC);
    AddCategoryMapping("PC|GAMES", TorznabCatType.PCGames);
    AddCategoryMapping("EBOOKS", TorznabCatType.BooksEbook);
}
/// <summary>
/// Apply the configuration entered by Jackett's user, then log in to Abnormal.
/// </summary>
/// <param name="configJson">Our params in Json</param>
/// <returns>Configuration state</returns>
public override async Task<IndexerConfigurationStatus> ApplyConfiguration(JToken configJson)
{
    // Load and sanity-check the user-supplied values
    LoadValuesFromJson(configJson);
    validateConfig();

    // TODO: Encoded Content not supported by Jackett at this time
    // emulatedBrowserHeaders.Add("Accept-Encoding", "gzip, deflate");

    // Rebuild the emulated-browser headers when browser simulation is enabled
    if (ConfigData.Browser.Value)
    {
        emulatedBrowserHeaders.Clear();
        emulatedBrowserHeaders.Add("Accept", ConfigData.HeaderAccept.Value);
        emulatedBrowserHeaders.Add("Accept-Language", ConfigData.HeaderAcceptLang.Value);
        emulatedBrowserHeaders.Add("DNT", Convert.ToInt32(ConfigData.HeaderDNT.Value).ToString());
        emulatedBrowserHeaders.Add("Upgrade-Insecure-Requests", Convert.ToInt32(ConfigData.HeaderUpgradeInsecure.Value).ToString());
        emulatedBrowserHeaders.Add("User-Agent", ConfigData.HeaderUserAgent.Value);
    }

    // NOTE(review): this request is built but never sent, so no CSRF token is
    // actually fetched before login — confirm whether the GET was meant to run.
    var myRequest = new Utils.Clients.WebRequest()
    {
        Url = LoginUrl
    };
    myRequest.Headers = emulatedBrowserHeaders;

    // Login form fields
    var pairs = new Dictionary<string, string> {
        { "username", ConfigData.Username.Value },
        { "password", ConfigData.Password.Value },
        { "keeplogged", "1" },
        { "login", "Connexion" }
    };

    // POST the credentials to the login page
    var request = new Utils.Clients.WebRequest()
    {
        PostData = pairs,
        Referer = LoginUrl,
        Type = RequestType.POST,
        Url = LoginUrl,
        Headers = emulatedBrowserHeaders
    };

    latencyNow();
    output("\nPerform loggin.. with " + LoginUrl);
    var response = await webclient.GetString(request);

    // A "session=" cookie in the response means we are logged in
    await ConfigureIfOK(response.Cookies, response.Cookies.Contains("session="), () =>
    {
        // Parse the error page for the failure reason and remaining attempts
        CQ dom = response.Content;
        var message = dom[".warning"].Text().Split('.').Reverse().Skip(1).First();
        var left = dom[".info"].Text().Trim();
        output("-> Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
        throw new ExceptionWithConfigData("Login failed: " + message, configData);
    });

    output("-> Login Success");
    return IndexerConfigurationStatus.RequiresTesting;
}
/// <summary>
/// Execute our search query
/// </summary>
/// <param name="query">Query</param>
/// <returns>Releases</returns>
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
    // Hand-built French timezone (UTC+1 with EU daylight-saving transitions)
    // so the tracker's local publish dates convert to UTC on every host OS.
    var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
    var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
    var delta = new TimeSpan(1, 0, 0);
    var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
    TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
    var FranceTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);

    var releases = new List<ReleaseInfo>();
    var torrentRowList = new List<CQ>();
    var searchTerm = query.GetQueryString();
    var searchUrl = SearchUrl;
    var nbResults = 0;
    var pageLinkCount = 0;

    // Serve from the in-memory cache when a term is searched outside dev mode
    if (!DevMode && !string.IsNullOrEmpty(searchTerm))
    {
        lock (cache)
        {
            // Drop expired entries, then look for a hit on this exact term
            CleanCache();
            var cachedResult = cache.FirstOrDefault(i => i.Query == searchTerm);
            if (cachedResult != null)
                return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
        }
    }

    // First results page
    var request = buildQuery(searchTerm, query, searchUrl);
    fDom = await queryExec(request);

    try
    {
        var firstPageRows = findTorrentRows();
        torrentRowList.AddRange(firstPageRows.Select(row => row.Cq()));

        // Pagination links present at the bottom of the page?
        var pagination = fDom[".linkbox > a"].Length != 0;
        if (pagination)
        {
            // The last pagination link carries the highest page number;
            // estimate the total as rows-per-page * number of pages.
            pageLinkCount = ParseUtil.CoerceInt(Regex.Match(fDom[".linkbox > a"].Last().Attr("href").ToString(), @"\d+").Value);
            nbResults = firstPageRows.Count() * pageLinkCount;
        }
        else if (firstPageRows.Length >= 1)
        {
            // Single page of results
            nbResults = firstPageRows.Count();
            pageLinkCount = 1;
        }
        else
        {
            output("\nNo result found for your query, please try another search term ...\n", "info");
            return releases;
        }
        output($"\nFound {nbResults} result(s) (+/- {firstPageRows.Length}) in {pageLinkCount} page(s) for this query !");
        output($"\nThere are {firstPageRows.Length} results on the first page !");

        // Walk the remaining pages, bounded by the user's "Pages" setting
        if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
        {
            for (var i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
            {
                output($"\nProcessing page #{i}");
                latencyNow();
                var pageRequest = buildQuery(searchTerm, query, searchUrl, i);
                fDom = await queryExec(pageRequest);
                var additionalPageRows = findTorrentRows();
                torrentRowList.AddRange(additionalPageRows.Select(row => row.Cq()));
            }
        }

        // Turn every collected table row into a ReleaseInfo
        foreach (var row in torrentRowList)
        {
            output($"\n=>> Torrent #{releases.Count + 1}");

            // Torrent id, taken from the details link
            var id = ParseUtil.CoerceInt(Regex.Match(row.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value);
            output($"ID: {id}");

            // Release name; optionally rewrite the "MULTI" keyword (issue #3847)
            var name = row.Find("td:eq(1) > a").Text();
            if (!string.IsNullOrEmpty(ReplaceMulti))
            {
                var multi = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
                name = multi.Replace(name, "$1" + ReplaceMulti + "$2");
            }
            output($"Release: {name}");

            // Tracker category -> newznab categories
            var categoryID = row.Find("td:eq(0) > a").Attr("href").Replace("torrents.php?cat[]=", String.Empty);
            var newznab = MapTrackerCatToNewznab(categoryID);
            output($"Category: {newznab.First()} ({categoryID})");

            // Peers
            var seeders = ParseUtil.CoerceInt(Regex.Match(row.Find("td:eq(5)").Text(), @"\d+").Value);
            output($"Seeders: {seeders}");
            var leechers = ParseUtil.CoerceInt(Regex.Match(row.Find("td:eq(6)").Text(), @"\d+").Value);
            output($"Leechers: {leechers}");
            // NOTE(review): reads the same column as seeders (td:eq(5)) — looks
            // like it should be the snatched column; the value is only logged,
            // so behavior is preserved here pending confirmation of the HTML.
            var completed = ParseUtil.CoerceInt(Regex.Match(row.Find("td:eq(5)").Text(), @"\d+").Value);
            output($"Completed: {completed}");

            // Size: map French unit suffixes to parseable ones
            var sizeStr = row.Find("td:eq(4)").Text().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
            var size = ReleaseInfo.GetBytes(sizeStr);
            output($"Size: {sizeStr} ({size} bytes)");

            // Publish date: shown in French local time, converted to UTC
            var datestr = row.Find("span.time").Attr("title");
            var dateLocal = DateTime.SpecifyKind(DateTime.ParseExact(datestr, "MMM dd yyyy, HH:mm", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
            var date = TimeZoneInfo.ConvertTimeToUtc(dateLocal, FranceTz);
            output($"Released on: {date}");

            // Details / comments pages
            var detailsLink = new Uri(TorrentDescriptionUrl + id);
            output($"Details: {detailsLink.AbsoluteUri}");
            var commentsLink = new Uri(TorrentCommentUrl + id);
            output($"Comments Link: {commentsLink.AbsoluteUri}");

            // Download link; absence means the torrent is still pending
            var link = row.Find("td:eq(3) > a").Attr("href");
            if (String.IsNullOrEmpty(link))
            {
                output("Download Link: Not available, torrent pending ? Skipping ...");
                continue;
            }
            var downloadLink = new Uri(SiteLink + link);
            output($"Download Link: {downloadLink.AbsoluteUri}");

            // Freeleech flag zeroes the download volume factor
            var downloadVolumeFactor = 1;
            if (row.Find("img[alt=\"Freeleech\"]").Length >= 1)
            {
                downloadVolumeFactor = 0;
                output("FreeLeech =)");
            }

            releases.Add(new ReleaseInfo()
            {
                Category = newznab,
                Title = name,
                Seeders = seeders,
                Peers = seeders + leechers,
                MinimumRatio = 1,
                MinimumSeedTime = 172800,
                PublishDate = date,
                Size = size,
                Guid = detailsLink,
                Comments = commentsLink,
                Link = downloadLink,
                UploadVolumeFactor = 1,
                DownloadVolumeFactor = downloadVolumeFactor
            });
        }
    }
    catch (Exception ex)
    {
        OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex);
    }

    return releases;
}
/// <summary>
/// Build query to process
/// </summary>
/// <param name="term">Term to search</param>
/// <param name="query">Torznab Query for categories mapping</param>
/// <param name="url">Search url for provider</param>
/// <param name="page">Page number to request (0 = first page, implicit)</param>
/// <returns>URL to query for parsing and processing results</returns>
private string buildQuery(string term, TorznabQuery query, string url, int page = 0)
{
    var parameters = new NameValueCollection();
    var categoriesList = MapTorznabCapsToTrackers(query);
    string categories = null;

    // Pagination parameter (omitted for the first page)
    if (page > 0)
    {
        parameters.Add("page", page.ToString());
    }

    // Chain all mapped categories into repeated cat[] parameters: the last
    // category goes through the collection, the others are appended raw so
    // the final URL contains one "cat[]=..." segment per category.
    foreach (var category in categoriesList)
    {
        if (categoriesList.Last() == category)
        {
            parameters.Add(Uri.EscapeDataString("cat[]"), WebUtility.UrlEncode(category) + categories);
        }
        else
        {
            categories += "&" + Uri.EscapeDataString("cat[]") + "=" + WebUtility.UrlEncode(category);
        }
    }

    if (!string.IsNullOrWhiteSpace(term))
    {
        parameters.Add("search", WebUtility.UrlEncode(term));
    }
    else
    {
        // No term provided: ask the tracker for everything
        parameters.Add("search", WebUtility.UrlEncode("%"));
        term = "all"; // only used for the log line below
    }

    // Assemble manually -- GetQueryString would re-encode and break cat[]
    url += "?" + string.Join("&", parameters.AllKeys.Select(a => a + "=" + parameters[a]));
    output("\nBuilded query for \"" + term + "\"... " + url);
    return url;
}
/// <summary>
/// Switch Method for Querying
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryExec(string request)
{
    // DEV mode with the hard drive cache enabled goes through the disk
    // cache; every other configuration queries the tracker directly.
    return DevMode && CacheMode
        ? await queryCache(request)
        : await queryTracker(request);
}
/// <summary>
/// Get Torrents Page from Cache by Query Provided
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryCache(string request)
{
    String results;

    // Make sure the provider's cache directory exists
    System.IO.Directory.CreateDirectory(Directory);

    // Purge outdated cached queries before any lookup
    cleanCacheStorage();

    // The cached file name is a fingerprint (SHA1) of the request URL
    var fileName = StringUtil.HashSHA1(request) + ".json";
    var file = Path.Combine(Directory, fileName);

    if (File.Exists(file))
    {
        // Cache hit: deserialize the stored page
        output("Loading results from hard drive cache ..." + fileName);
        try
        {
            using (var fileReader = File.OpenText(file))
            {
                var serializer = new JsonSerializer();
                results = (String)serializer.Deserialize(fileReader, typeof(String));
            }
        }
        catch (Exception e)
        {
            // Corrupt/unreadable cache entry: report and fall through with null
            output("Error loading cached results ! " + e.Message, "error");
            results = null;
        }
    }
    else
    {
        // Cache miss: hit the tracker, then persist the page for next time
        results = await queryTracker(request);
        output("Writing results to hard drive cache ..." + fileName);
        using (var fileWriter = File.CreateText(file))
        {
            var serializer = new JsonSerializer();
            serializer.Serialize(fileWriter, results);
        }
    }
    return results;
}
/// <summary>
/// Get Torrents Page from Tracker by Query Provided
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryTracker(string request)
{
    output("\nQuerying tracker for results....");
    // Random wait first, to avoid hammering the tracker
    latencyNow();
    var results = await RequestStringWithCookiesAndRetry(request, null, null, emulatedBrowserHeaders);
    return results.Content;
}
/// <summary>
/// Clean Hard Drive Cache Storage
/// </summary>
/// <param name="force">Force Provider Folder deletion</param>
private void cleanCacheStorage(bool force = false)
{
    if (force)
    {
        // Wipe the provider's whole storage folder
        output("\nDeleting Provider Storage folder and all files recursively ...");
        if (System.IO.Directory.Exists(Directory))
        {
            System.IO.Directory.Delete(Directory, true);
            output("-> Storage folder deleted successfully.");
        }
        else
        {
            // Nothing there, nothing to do
            output("-> No Storage folder found for this provider !");
        }
    }
    else
    {
        // Delete only the files older than the configured keep time
        var deleted = 0;
        output("\nCleaning Provider Storage folder... in progress.");
        foreach (var cachedFile in System.IO.Directory.GetFiles(Directory)
            .Select(f => new FileInfo(f))
            .Where(f => f.LastAccessTime < DateTime.Now.AddMilliseconds(-Convert.ToInt32(ConfigData.HardDriveCacheKeepTime.Value))))
        {
            output("Deleting cached file << " + cachedFile.Name + " >> ... done.");
            cachedFile.Delete();
            deleted++;
        }

        // Report what the cleaning pass removed
        output(deleted > 0
            ? "-> Deleted " + deleted + " cached files during cleaning."
            : "-> Nothing deleted during cleaning.");
    }
}
/// <summary>
/// Generate a random fake latency to avoid detection on tracker side.
/// Sleeps the current thread for a random duration inside the
/// user-configured [LatencyStart, LatencyEnd] millisecond range.
/// </summary>
private void latencyNow()
{
    if (Latency)
    {
        // FIX: previously seeded with DateTime.Now.Millisecond, which limited
        // the seed space to 1000 values; the parameterless constructor's
        // time-based seed is at least as good and is the idiomatic form.
        var random = new Random();
        var waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
        output("\nLatency Faker => Sleeping for " + waiting + " ms...");
        // Blocking sleep is intentional: this runs on the request path to
        // pace outgoing tracker calls.
        System.Threading.Thread.Sleep(waiting);
    }
}
/// <summary>
/// Find torrent rows in search pages
/// </summary>
/// <returns>JQuery Object</returns>
private CQ findTorrentRows() =>
    // All rows of the results table except the column-header row
    fDom[".torrent_table > tbody > tr"].Not(".colhead");
/// <summary>
/// Output message for logging or development (console)
/// </summary>
/// <param name="message">Message to output</param>
/// <param name="level">Level for Logger ("debug", "info" or "error"; anything else falls back to debug)</param>
private void output(string message, string level = "debug")
{
    // Dev mode writes straight to the console
    if (DevMode)
    {
        Console.WriteLine(message);
        return;
    }

    // Otherwise route to the logger at the requested level
    switch (level)
    {
        case "info":
            logger.Info(message);
            break;

        case "error":
            logger.Error(message);
            break;

        case "debug":
        default:
            // Only if Debug Level Enabled on Jackett
            if (logger.IsDebugEnabled)
            {
                logger.Debug(message);
            }
            break;
    }
}
/// <summary>
/// Validate Config entered by user on Jackett.
/// </summary>
/// <exception cref="ExceptionWithConfigData">
/// Thrown when a required setting is missing or a numeric setting fails to parse.
/// </exception>
private void validateConfig()
{
    output("\nValidating Settings ... \n");

    // Credentials are mandatory
    if (string.IsNullOrEmpty(ConfigData.Username.Value))
    {
        throw new ExceptionWithConfigData("You must provide a username for this tracker to login !", ConfigData);
    }
    output("Validated Setting -- Username (auth) => " + ConfigData.Username.Value);

    if (string.IsNullOrEmpty(ConfigData.Password.Value))
    {
        throw new ExceptionWithConfigData("You must provide a password with your username for this tracker to login !", ConfigData);
    }
    output("Validated Setting -- Password (auth) => " + ConfigData.Password.Value);

    // Max pages must be provided and numeric
    if (string.IsNullOrEmpty(ConfigData.Pages.Value))
    {
        throw new ExceptionWithConfigData("Please enter a maximum number of pages to crawl !", ConfigData);
    }
    validateNumericSetting(ConfigData.Pages.Value, "Max Pages", "Please enter a numeric maximum number of pages to crawl !");

    // Latency simulation requires both range boundaries when enabled
    if (ConfigData.Latency.Value)
    {
        output("\nValidated Setting -- Latency Simulation enabled");

        if (string.IsNullOrEmpty(ConfigData.LatencyStart.Value))
        {
            throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a start latency !", ConfigData);
        }
        validateNumericSetting(ConfigData.LatencyStart.Value, "Latency Start", "Please enter a numeric latency start in ms !");

        if (string.IsNullOrEmpty(ConfigData.LatencyEnd.Value))
        {
            throw new ExceptionWithConfigData("Latency Simulation enabled, Please enter a end latency !", ConfigData);
        }
        validateNumericSetting(ConfigData.LatencyEnd.Value, "Latency End", "Please enter a numeric latency end in ms !");
    }

    // Browser simulation requires its headers when enabled
    if (ConfigData.Browser.Value)
    {
        output("\nValidated Setting -- Browser Simulation enabled");

        if (string.IsNullOrEmpty(ConfigData.HeaderAccept.Value))
        {
            throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT header !", ConfigData);
        }
        output("Validated Setting -- ACCEPT (header) => " + ConfigData.HeaderAccept.Value);

        if (string.IsNullOrEmpty(ConfigData.HeaderAcceptLang.Value))
        {
            throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an ACCEPT-LANG header !", ConfigData);
        }
        output("Validated Setting -- ACCEPT-LANG (header) => " + ConfigData.HeaderAcceptLang.Value);

        if (string.IsNullOrEmpty(ConfigData.HeaderUserAgent.Value))
        {
            throw new ExceptionWithConfigData("Browser Simulation enabled, Please enter an USER-AGENT header !", ConfigData);
        }
        output("Validated Setting -- USER-AGENT (header) => " + ConfigData.HeaderUserAgent.Value);
    }

    // Hard drive cache is a DEV-mode-only feature
    if (ConfigData.HardDriveCache.Value)
    {
        output("\nValidated Setting -- DEV Hard Drive Cache enabled");

        if (!ConfigData.DevMode.Value)
        {
            throw new ExceptionWithConfigData("Hard Drive is enabled but not in DEV MODE, Please enable DEV MODE !", ConfigData);
        }

        if (string.IsNullOrEmpty(ConfigData.HardDriveCacheKeepTime.Value))
        {
            throw new ExceptionWithConfigData("Hard Drive Cache enabled, Please enter a maximum keep time for cache !", ConfigData);
        }
        validateNumericSetting(ConfigData.HardDriveCacheKeepTime.Value, "Cache Keep Time (ms)", "Please enter a numeric hard drive keep time in ms !");
    }
    else
    {
        // Cache disabled: remove any data cached by a previous configuration
        cleanCacheStorage(true);
    }
}

/// <summary>
/// Validate that a setting value parses as an integer, logging it on success.
/// FIX: replaces the previous Convert.ToInt32 inside try/catch(Exception)
/// pattern — an expected parse failure is not exception-driven control flow.
/// </summary>
/// <param name="value">Raw setting value to parse</param>
/// <param name="label">Human-readable setting name used in the log line</param>
/// <param name="parseError">Message for the exception thrown on parse failure</param>
/// <exception cref="ExceptionWithConfigData">Thrown when the value is not numeric</exception>
private void validateNumericSetting(string value, string label, string parseError)
{
    if (!int.TryParse(value, out var parsed))
    {
        throw new ExceptionWithConfigData(parseError, ConfigData);
    }
    output("Validated Setting -- " + label + " => " + parsed);
}
}
}