core: Apply declarations styles (#7166)

This commit is contained in:
Cory 2020-02-10 16:16:19 -06:00 committed by GitHub
parent e13cee2e95
commit 348dddfbee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
149 changed files with 1039 additions and 1067 deletions

View File

@ -25,8 +25,8 @@ namespace DateTimeRoutines
/// <returns>seconds</returns>
public static uint GetSecondsSinceUnixEpoch(this DateTime date_time)
{
TimeSpan t = date_time - new DateTime(1970, 1, 1);
int ss = (int)t.TotalSeconds;
var t = date_time - new DateTime(1970, 1, 1);
var ss = (int)t.TotalSeconds;
if (ss < 0)
return 0;
return (uint)ss;
@ -44,39 +44,39 @@ namespace DateTimeRoutines
/// <summary>
/// Index of first char of a date substring found in the string
/// </summary>
readonly public int IndexOfDate = -1;
public readonly int IndexOfDate = -1;
/// <summary>
/// Length of a date substring found in the string
/// </summary>
readonly public int LengthOfDate = -1;
public readonly int LengthOfDate = -1;
/// <summary>
/// Index of first char of a time substring found in the string
/// </summary>
readonly public int IndexOfTime = -1;
public readonly int IndexOfTime = -1;
/// <summary>
/// Length of a time substring found in the string
/// </summary>
readonly public int LengthOfTime = -1;
public readonly int LengthOfTime = -1;
/// <summary>
/// DateTime found in the string
/// </summary>
readonly public DateTime DateTime;
public readonly DateTime DateTime;
/// <summary>
/// True if a date was found within the string
/// </summary>
readonly public bool IsDateFound;
public readonly bool IsDateFound;
/// <summary>
/// True if a time was found within the string
/// </summary>
readonly public bool IsTimeFound;
public readonly bool IsTimeFound;
/// <summary>
/// UTC offset if it was found within the string
/// </summary>
readonly public TimeSpan UtcOffset;
public readonly TimeSpan UtcOffset;
/// <summary>
/// True if UTC offset was found in the string
/// </summary>
readonly public bool IsUtcOffsetFound;
public readonly bool IsUtcOffsetFound;
/// <summary>
/// UTC obtained from DateTime if IsUtcOffsetFound is True
/// </summary>
@ -113,7 +113,7 @@ namespace DateTimeRoutines
{
if (index_of_date < 0)//to avoid negative date exception when date is undefined
{
TimeSpan ts = date_time.TimeOfDay + utc_offset;
var ts = date_time.TimeOfDay + utc_offset;
if (ts < new TimeSpan(0))
UtcDateTime = new DateTime(1, 1, 2) + ts;
else
@ -146,7 +146,8 @@ namespace DateTimeRoutines
return _DefaultDate;
}
}
static DateTime _DefaultDate = DateTime.Now;
private static DateTime _DefaultDate = DateTime.Now;
/// <summary>
/// If true then DefaultDate property is ignored and DefaultDate is always DateTime.Now
@ -183,10 +184,9 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="date_time">parsed date-time output</param>
/// <returns>true if both date and time were found, else false</returns>
static public bool TryParseDateTime(this string str, DateTimeFormat default_format, out DateTime date_time)
public static bool TryParseDateTime(this string str, DateTimeFormat default_format, out DateTime date_time)
{
ParsedDateTime parsed_date_time;
if (!TryParseDateTime(str, default_format, out parsed_date_time))
if (!TryParseDateTime(str, default_format, out ParsedDateTime parsed_date_time))
{
date_time = new DateTime(1, 1, 1);
return false;
@ -204,10 +204,9 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="date_time">parsed date-time output</param>
/// <returns>true if date and/or time was found, else false</returns>
static public bool TryParseDateOrTime(this string str, DateTimeFormat default_format, out DateTime date_time)
public static bool TryParseDateOrTime(this string str, DateTimeFormat default_format, out DateTime date_time)
{
ParsedDateTime parsed_date_time;
if (!TryParseDateOrTime(str, default_format, out parsed_date_time))
if (!TryParseDateOrTime(str, default_format, out ParsedDateTime parsed_date_time))
{
date_time = new DateTime(1, 1, 1);
return false;
@ -226,8 +225,7 @@ namespace DateTimeRoutines
/// <returns>true if time was found, else false</returns>
public static bool TryParseTime(this string str, DateTimeFormat default_format, out DateTime time)
{
ParsedDateTime parsed_time;
if (!TryParseTime(str, default_format, out parsed_time, null))
if (!TryParseTime(str, default_format, out var parsed_time, null))
{
time = new DateTime(1, 1, 1);
return false;
@ -245,10 +243,9 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="date">parsed date output</param>
/// <returns>true if date was found, else false</returns>
static public bool TryParseDate(this string str, DateTimeFormat default_format, out DateTime date)
public static bool TryParseDate(this string str, DateTimeFormat default_format, out DateTime date)
{
ParsedDateTime parsed_date;
if (!TryParseDate(str, default_format, out parsed_date))
if (!TryParseDate(str, default_format, out ParsedDateTime parsed_date))
{
date = new DateTime(1, 1, 1);
return false;
@ -268,7 +265,7 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="parsed_date_time">parsed date-time output</param>
/// <returns>true if both date and time were found, else false</returns>
static public bool TryParseDateTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date_time)
public static bool TryParseDateTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date_time)
{
if (DateTimeRoutines.TryParseDateOrTime(str, default_format, out parsed_date_time)
&& parsed_date_time.IsDateFound
@ -288,7 +285,7 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="parsed_time">parsed date-time output</param>
/// <returns>true if time was found, else false</returns>
static public bool TryParseTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_time)
public static bool TryParseTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_time)
{
return TryParseTime(str, default_format, out parsed_time, null);
}
@ -302,30 +299,30 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="parsed_date_time">parsed date-time output</param>
/// <returns>true if date or time was found, else false</returns>
static public bool TryParseDateOrTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date_time)
public static bool TryParseDateOrTime(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date_time)
{
parsed_date_time = null;
ParsedDateTime parsed_date;
ParsedDateTime parsed_time;
if (!TryParseDate(str, default_format, out parsed_date))
if (!TryParseDate(str, default_format, out
ParsedDateTime parsed_date))
{
if (!TryParseTime(str, default_format, out parsed_time, null))
return false;
DateTime date_time = new DateTime(DefaultDate.Year, DefaultDate.Month, DefaultDate.Day, parsed_time.DateTime.Hour, parsed_time.DateTime.Minute, parsed_time.DateTime.Second);
var date_time = new DateTime(DefaultDate.Year, DefaultDate.Month, DefaultDate.Day, parsed_time.DateTime.Hour, parsed_time.DateTime.Minute, parsed_time.DateTime.Second);
parsed_date_time = new ParsedDateTime(-1, -1, parsed_time.IndexOfTime, parsed_time.LengthOfTime, date_time, parsed_time.UtcOffset);
}
else
{
if (!TryParseTime(str, default_format, out parsed_time, parsed_date))
{
DateTime date_time = new DateTime(parsed_date.DateTime.Year, parsed_date.DateTime.Month, parsed_date.DateTime.Day, 0, 0, 0);
var date_time = new DateTime(parsed_date.DateTime.Year, parsed_date.DateTime.Month, parsed_date.DateTime.Day, 0, 0, 0);
parsed_date_time = new ParsedDateTime(parsed_date.IndexOfDate, parsed_date.LengthOfDate, -1, -1, date_time);
}
else
{
DateTime date_time = new DateTime(parsed_date.DateTime.Year, parsed_date.DateTime.Month, parsed_date.DateTime.Day, parsed_time.DateTime.Hour, parsed_time.DateTime.Minute, parsed_time.DateTime.Second);
var date_time = new DateTime(parsed_date.DateTime.Year, parsed_date.DateTime.Month, parsed_date.DateTime.Day, parsed_time.DateTime.Hour, parsed_time.DateTime.Minute, parsed_time.DateTime.Second);
parsed_date_time = new ParsedDateTime(parsed_date.IndexOfDate, parsed_date.LengthOfDate, parsed_time.IndexOfTime, parsed_time.LengthOfTime, date_time, parsed_time.UtcOffset);
}
}
@ -385,15 +382,15 @@ namespace DateTimeRoutines
//try
//{
int hour = int.Parse(m.Groups["hour"].Value);
var hour = int.Parse(m.Groups["hour"].Value);
if (hour < 0 || hour > 23)
return false;
int minute = int.Parse(m.Groups["minute"].Value);
var minute = int.Parse(m.Groups["minute"].Value);
if (minute < 0 || minute > 59)
return false;
int second = 0;
var second = 0;
if (!string.IsNullOrEmpty(m.Groups["second"].Value))
{
second = int.Parse(m.Groups["second"].Value);
@ -406,15 +403,15 @@ namespace DateTimeRoutines
else if (string.Compare(m.Groups["ampm"].Value, "AM", true) == 0 && hour == 12)
hour -= 12;
DateTime date_time = new DateTime(1, 1, 1, hour, minute, second);
var date_time = new DateTime(1, 1, 1, hour, minute, second);
if (m.Groups["offset_hh"].Success)
{
int offset_hh = int.Parse(m.Groups["offset_hh"].Value);
int offset_mm = 0;
var offset_hh = int.Parse(m.Groups["offset_hh"].Value);
var offset_mm = 0;
if (m.Groups["offset_mm"].Success)
offset_mm = int.Parse(m.Groups["offset_mm"].Value);
TimeSpan utc_offset = new TimeSpan(offset_hh, offset_mm, 0);
var utc_offset = new TimeSpan(offset_hh, offset_mm, 0);
if (m.Groups["offset_sign"].Value == "-")
utc_offset = -utc_offset;
parsed_time = new ParsedDateTime(-1, -1, m.Index, m.Length, date_time, utc_offset);
@ -461,7 +458,7 @@ namespace DateTimeRoutines
/// <param name="default_format">format to be used preferably in ambivalent instances</param>
/// <param name="parsed_date">parsed date output</param>
/// <returns>true if date was found, else false</returns>
static public bool TryParseDate(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date)
public static bool TryParseDate(this string str, DateTimeFormat default_format, out ParsedDateTime parsed_date)
{
parsed_date = null;
@ -469,7 +466,7 @@ namespace DateTimeRoutines
return false;
//look for dd/mm/yy
Match m = Regex.Match(str, @"(?<=^|[^\d])(?'day'\d{1,2})\s*(?'separator'[\\/\.])+\s*(?'month'\d{1,2})\s*\'separator'+\s*(?'year'\d{2}|\d{4})(?=$|[^\d])", RegexOptions.Compiled | RegexOptions.IgnoreCase);
var m = Regex.Match(str, @"(?<=^|[^\d])(?'day'\d{1,2})\s*(?'separator'[\\/\.])+\s*(?'month'\d{1,2})\s*\'separator'+\s*(?'year'\d{2}|\d{4})(?=$|[^\d])", RegexOptions.Compiled | RegexOptions.IgnoreCase);
if (m.Success)
{
DateTime date;
@ -491,8 +488,7 @@ namespace DateTimeRoutines
m = Regex.Match(str, @"(?<=^|[^\d])(?'year'\d{2}|\d{4})\s*(?'separator'[\-])\s*(?'month'\d{1,2})\s*\'separator'+\s*(?'day'\d{1,2})(?=$|[^\d])", RegexOptions.Compiled | RegexOptions.IgnoreCase);
if (m.Success)
{
DateTime date;
if (!convert_to_date(int.Parse(m.Groups["year"].Value), int.Parse(m.Groups["month"].Value), int.Parse(m.Groups["day"].Value), out date))
if (!convert_to_date(int.Parse(m.Groups["year"].Value), int.Parse(m.Groups["month"].Value), int.Parse(m.Groups["day"].Value), out var date))
return false;
parsed_date = new ParsedDateTime(m.Index, m.Length, -1, -1, date);
return true;
@ -514,9 +510,9 @@ namespace DateTimeRoutines
m = Regex.Match(str, @"(?:^|[^\d\w])(?'month'Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)[uarychilestmbro]*\s+(?'day'\d{1,2})(?:-?st|-?th|-?rd|-?nd)?(?:\s*,?\s*(?'year'\d{4}))?(?=$|[^\d\w])", RegexOptions.Compiled | RegexOptions.IgnoreCase);
if (m.Success)
{
int month = -1;
int index_of_date = m.Index;
int length_of_date = m.Length;
var month = -1;
var index_of_date = m.Index;
var length_of_date = m.Length;
switch (m.Groups["month"].Value)
{
@ -575,8 +571,7 @@ namespace DateTimeRoutines
else
year = DefaultDate.Year;
DateTime date;
if (!convert_to_date(year, month, int.Parse(m.Groups["day"].Value), out date))
if (!convert_to_date(year, month, int.Parse(m.Groups["day"].Value), out var date))
return false;
parsed_date = new ParsedDateTime(index_of_date, length_of_date, -1, -1, date);
return true;
@ -585,7 +580,7 @@ namespace DateTimeRoutines
return false;
}
static bool convert_to_date(int year, int month, int day, out DateTime date)
private static bool convert_to_date(int year, int month, int day, out DateTime date)
{
if (year >= 100)
{

View File

@ -12,7 +12,7 @@ namespace Jackett.Common.Helpers
return string.Empty;
}
byte[] bytes = encoding.GetBytes(searchString);
var bytes = encoding.GetBytes(searchString);
return encoding.GetString(WebUtility.UrlEncodeToBytes(bytes, 0, bytes.Length));
}

View File

@ -38,7 +38,7 @@ namespace Jackett.Common.Indexers
private bool CacheMode { get { return ConfigData.HardDriveCache.Value; } }
private static string Directory => Path.Combine(Path.GetTempPath(), Assembly.GetExecutingAssembly().GetName().Name.ToLower(), MethodBase.GetCurrentMethod().DeclaringType?.Name.ToLower());
private Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();
private readonly Dictionary<string, string> emulatedBrowserHeaders = new Dictionary<string, string>();
private CQ fDom = null;
private ConfigurationDataAbnormal ConfigData
@ -171,10 +171,10 @@ namespace Jackett.Common.Indexers
{
// Parse error page
CQ dom = response.Content;
string message = dom[".warning"].Text().Split('.').Reverse().Skip(1).First();
var message = dom[".warning"].Text().Split('.').Reverse().Skip(1).First();
// Try left
string left = dom[".info"].Text().Trim();
var left = dom[".info"].Text().Trim();
// Oops, unable to login
output("-> Login failed: \"" + message + "\" and " + left + " tries left before being banned for 6 hours !", "error");
@ -193,19 +193,19 @@ namespace Jackett.Common.Indexers
/// <returns>Releases</returns>
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
TimeZoneInfo FranceTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var FranceTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var releases = new List<ReleaseInfo>();
var torrentRowList = new List<CQ>();
var searchTerm = query.GetQueryString();
var searchUrl = SearchUrl;
int nbResults = 0;
int pageLinkCount = 0;
var nbResults = 0;
var pageLinkCount = 0;
// Check cache first so we don't query the server (if search term used or not in dev mode)
if (!DevMode && !string.IsNullOrEmpty(searchTerm))
@ -237,7 +237,7 @@ namespace Jackett.Common.Indexers
torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq()));
// Check if there are pagination links at bottom
Boolean pagination = (fDom[".linkbox > a"].Length != 0);
var pagination = (fDom[".linkbox > a"].Length != 0);
// If pagination available
if (pagination)
@ -271,7 +271,7 @@ namespace Jackett.Common.Indexers
if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
{
// Starting with page #2
for (int i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
for (var i = 2; i <= Math.Min(int.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
{
output("\nProcessing page #" + i);
@ -293,44 +293,44 @@ namespace Jackett.Common.Indexers
}
// Loop on results
foreach (CQ tRow in torrentRowList)
foreach (var tRow in torrentRowList)
{
output("\n=>> Torrent #" + (releases.Count + 1));
// ID
int id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value);
var id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value);
output("ID: " + id);
// Release Name
string name = tRow.Find("td:eq(1) > a").Text();
var name = tRow.Find("td:eq(1) > a").Text();
//issue #3847 replace multi keyword
if (!string.IsNullOrEmpty(ReplaceMulti))
{
System.Text.RegularExpressions.Regex regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
name = regex.Replace(name, "$1" + ReplaceMulti + "$2");
}
output("Release: " + name);
// Category
string categoryID = tRow.Find("td:eq(0) > a").Attr("href").Replace("torrents.php?cat[]=", String.Empty);
var categoryID = tRow.Find("td:eq(0) > a").Attr("href").Replace("torrents.php?cat[]=", string.Empty);
var newznab = MapTrackerCatToNewznab(categoryID);
output("Category: " + MapTrackerCatToNewznab(categoryID).First().ToString() + " (" + categoryID + ")");
// Seeders
int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value);
var seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value);
output("Seeders: " + seeders);
// Leechers
int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6)").Text(), @"\d+").Value);
var leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6)").Text(), @"\d+").Value);
output("Leechers: " + leechers);
// Completed
int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value);
var completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value);
output("Completed: " + completed);
// Size
string sizeStr = tRow.Find("td:eq(4)").Text().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
long size = ReleaseInfo.GetBytes(sizeStr);
var sizeStr = tRow.Find("td:eq(4)").Text().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb");
var size = ReleaseInfo.GetBytes(sizeStr);
output("Size: " + sizeStr + " (" + size + " bytes)");
// Publish DateToString
@ -340,17 +340,17 @@ namespace Jackett.Common.Indexers
output("Released on: " + date);
// Torrent Details URL
Uri detailsLink = new Uri(TorrentDescriptionUrl + id);
var detailsLink = new Uri(TorrentDescriptionUrl + id);
output("Details: " + detailsLink.AbsoluteUri);
// Torrent Comments URL
Uri commentsLink = new Uri(TorrentCommentUrl + id);
var commentsLink = new Uri(TorrentCommentUrl + id);
output("Comments Link: " + commentsLink.AbsoluteUri);
// Torrent Download URL
Uri downloadLink = null;
string link = tRow.Find("td:eq(3) > a").Attr("href");
if (!String.IsNullOrEmpty(link))
var link = tRow.Find("td:eq(3) > a").Attr("href");
if (!string.IsNullOrEmpty(link))
{
// Download link available
downloadLink = new Uri(SiteLink + link);
@ -364,7 +364,7 @@ namespace Jackett.Common.Indexers
}
// Freeleech
int downloadVolumeFactor = 1;
var downloadVolumeFactor = 1;
if (tRow.Find("img[alt=\"Freeleech\"]").Length >= 1)
{
downloadVolumeFactor = 0;
@ -411,7 +411,7 @@ namespace Jackett.Common.Indexers
private string buildQuery(string term, TorznabQuery query, string url, int page = 0)
{
var parameters = new NameValueCollection();
List<string> categoriesList = MapTorznabCapsToTrackers(query);
var categoriesList = MapTorznabCapsToTrackers(query);
string categories = null;
// Check if we are processing a new page
@ -422,7 +422,7 @@ namespace Jackett.Common.Indexers
}
// Loop on Categories needed
foreach (string category in categoriesList)
foreach (var category in categoriesList)
{
// If last, build !
if (categoriesList.Last() == category)
@ -464,9 +464,9 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryExec(string request)
private async Task<string> queryExec(string request)
{
String results = null;
string results = null;
// Switch in we are in DEV mode with Hard Drive Cache or not
if (DevMode && CacheMode)
@ -487,9 +487,9 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryCache(string request)
private async Task<string> queryCache(string request)
{
String results;
string results;
// Create Directory if not exist
System.IO.Directory.CreateDirectory(Directory);
@ -498,10 +498,10 @@ namespace Jackett.Common.Indexers
cleanCacheStorage();
// File Name
string fileName = StringUtil.HashSHA1(request) + ".json";
var fileName = StringUtil.HashSHA1(request) + ".json";
// Create fingerprint for request
string file = Path.Combine(Directory, fileName);
var file = Path.Combine(Directory, fileName);
// Checking modes states
if (File.Exists(file))
@ -510,10 +510,10 @@ namespace Jackett.Common.Indexers
output("Loading results from hard drive cache ..." + fileName);
try
{
using (StreamReader fileReader = File.OpenText(file))
using (var fileReader = File.OpenText(file))
{
JsonSerializer serializer = new JsonSerializer();
results = (String)serializer.Deserialize(fileReader, typeof(String));
var serializer = new JsonSerializer();
results = (string)serializer.Deserialize(fileReader, typeof(string));
}
}
catch (Exception e)
@ -529,9 +529,9 @@ namespace Jackett.Common.Indexers
// Cached file didn't exist for our query, writing it right now !
output("Writing results to hard drive cache ..." + fileName);
using (StreamWriter fileWriter = File.CreateText(file))
using (var fileWriter = File.CreateText(file))
{
JsonSerializer serializer = new JsonSerializer();
var serializer = new JsonSerializer();
serializer.Serialize(fileWriter, results);
}
}
@ -543,7 +543,7 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> queryTracker(string request)
private async Task<string> queryTracker(string request)
{
WebClientStringResult results = null;
@ -621,7 +621,7 @@ namespace Jackett.Common.Indexers
{
// Generate a random value in our range
var random = new Random(DateTime.Now.Millisecond);
int waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
var waiting = random.Next(Convert.ToInt32(ConfigData.LatencyStart.Value), Convert.ToInt32(ConfigData.LatencyEnd.Value));
output("\nLatency Faker => Sleeping for " + waiting + " ms...");
// Sleep now...

View File

@ -17,7 +17,7 @@ namespace Jackett.Common.Indexers.Abstract
protected string endpoint;
protected string APIUrl { get { return SiteLink + endpoint; } }
new ConfigurationDataUserPasskey configData
private new ConfigurationDataUserPasskey configData
{
get { return (ConfigurationDataUserPasskey)base.configData; }
set { base.configData = value; }

View File

@ -27,7 +27,7 @@ namespace Jackett.Common.Indexers.Abstract
protected bool supportsCategories = true; // set to false if the tracker doesn't include the categories in the API search results
protected bool useTokens = false;
new ConfigurationDataBasicLogin configData
private new ConfigurationDataBasicLogin configData
{
get { return (ConfigurationDataBasicLogin)base.configData; }
set { base.configData = value; }
@ -115,7 +115,7 @@ namespace Jackett.Common.Indexers.Abstract
if (!string.IsNullOrWhiteSpace(query.ImdbID))
{
if (this.imdbInTags)
if (imdbInTags)
queryCollection.Add("taglist", query.ImdbID);
else
queryCollection.Add("cataloguenumber", query.ImdbID);
@ -202,7 +202,7 @@ namespace Jackett.Common.Indexers.Abstract
{
foreach (JObject torrent in r["torrents"])
{
ReleaseInfo release2 = (ReleaseInfo)release.Clone();
var release2 = (ReleaseInfo)release.Clone();
FillReleaseInfoFromJson(release2, torrent);
if (ReleaseInfoPostParse(release2, torrent, r))
releases.Add(release2);
@ -230,7 +230,7 @@ namespace Jackett.Common.Indexers.Abstract
return true;
}
void FillReleaseInfoFromJson(ReleaseInfo release, JObject torrent)
private void FillReleaseInfoFromJson(ReleaseInfo release, JObject torrent)
{
var torrentId = torrent["torrentId"];

View File

@ -528,7 +528,7 @@ namespace Jackett.Common.Indexers
private IEnumerable<int> ParseCategories(Uri showUri)
{
Dictionary<string, string> categoriesMap = CategoriesMap;
var categoriesMap = CategoriesMap;
var path = showUri.AbsolutePath.ToLowerInvariant();

View File

@ -87,7 +87,8 @@ namespace Jackett.Common.Indexers
{"29", "Spanish (LATAM)" },
{"30", "Persian"},
{"31", "Malaysian"}
}) { Name = "Language", Value = "1" };
})
{ Name = "Language", Value = "1" };
configData.AddDynamic("languageid", languageSelect);
// Configure the sort selects
@ -97,7 +98,8 @@ namespace Jackett.Common.Indexers
{"seeders", "seeders"},
{"size", "size"},
{"filename", "title"}
}) { Name = "Sort by", Value = "upload_timestamp" };
})
{ Name = "Sort by", Value = "upload_timestamp" };
configData.AddDynamic("sortrequestedfromsite", sortBySelect);
var orderSelect = new SelectItem(new Dictionary<string, string>()

View File

@ -228,12 +228,12 @@ namespace Jackett.Common.Indexers
if (!string.IsNullOrWhiteSpace(EditionTitle))
releaseInfo = WebUtility.HtmlDecode(EditionTitle);
Regex SeasonRegEx = new Regex(@"Season (\d+)", RegexOptions.Compiled);
var SeasonRegEx = new Regex(@"Season (\d+)", RegexOptions.Compiled);
var SeasonRegExMatch = SeasonRegEx.Match(releaseInfo);
if (SeasonRegExMatch.Success)
season = ParseUtil.CoerceInt(SeasonRegExMatch.Groups[1].Value);
Regex EpisodeRegEx = new Regex(@"Episode (\d+)", RegexOptions.Compiled);
var EpisodeRegEx = new Regex(@"Episode (\d+)", RegexOptions.Compiled);
var EpisodeRegExMatch = EpisodeRegEx.Match(releaseInfo);
if (EpisodeRegExMatch.Success)
episode = EpisodeRegExMatch.Groups[1].Value;
@ -242,7 +242,7 @@ namespace Jackett.Common.Indexers
releaseInfo = releaseInfo.Replace("Season ", "S");
releaseInfo = releaseInfo.Trim();
if (PadEpisode && int.TryParse(releaseInfo, out int test) && releaseInfo.Length == 1)
if (PadEpisode && int.TryParse(releaseInfo, out var test) && releaseInfo.Length == 1)
{
releaseInfo = "0" + releaseInfo;
}
@ -322,7 +322,7 @@ namespace Jackett.Common.Indexers
// We dont actually have a release name >.> so try to create one
var releaseTags = Property.Split("|".ToCharArray(), StringSplitOptions.RemoveEmptyEntries).ToList();
for (int i = releaseTags.Count - 1; i >= 0; i--)
for (var i = releaseTags.Count - 1; i >= 0; i--)
{
releaseTags[i] = releaseTags[i].Trim();
if (string.IsNullOrWhiteSpace(releaseTags[i]))

View File

@ -94,7 +94,7 @@ namespace Jackett.Common.Indexers
var searchString = query.GetQueryString();
// replace any space, special char, etc. with % (wildcard)
Regex ReplaceRegex = new Regex("[^a-zA-Z0-9]+");
var ReplaceRegex = new Regex("[^a-zA-Z0-9]+");
searchString = ReplaceRegex.Replace(searchString, "%");
var searchUrl = SearchUrl;

View File

@ -91,15 +91,15 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
List<string> searchStrings = new List<string>(new string[] { query.GetQueryString() });
var releases = new List<ReleaseInfo>();
var searchStrings = new List<string>(new string[] { query.GetQueryString() });
if (string.IsNullOrEmpty(query.Episode) && (query.Season > 0))
// Tracker naming rules: If query is for a whole season, "Season #" instead of "S##".
searchStrings.Add((query.SanitizedSearchTerm + " " + string.Format("\"Season {0}\"", query.Season)).Trim());
List<string> categories = MapTorznabCapsToTrackers(query);
List<string> request_urls = new List<string>();
var categories = MapTorznabCapsToTrackers(query);
var request_urls = new List<string>();
foreach (var searchString in searchStrings)
{
@ -118,12 +118,12 @@ namespace Jackett.Common.Indexers
request_urls.Add(SearchUrl + queryCollection.GetQueryString());
}
IEnumerable<Task<WebClientStringResult>> downloadTasksQuery =
var downloadTasksQuery =
from url in request_urls select RequestStringWithCookiesAndRetry(url);
WebClientStringResult[] responses = await Task.WhenAll(downloadTasksQuery.ToArray());
var responses = await Task.WhenAll(downloadTasksQuery.ToArray());
for (int i = 0; i < searchStrings.Count(); i++)
for (var i = 0; i < searchStrings.Count(); i++)
{
var results = responses[i];
// Occasionally the cookies become invalid, login again if that happens
@ -138,7 +138,7 @@ namespace Jackett.Common.Indexers
var rows = dom["#torrent_table > tbody > tr.torrent"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;
@ -183,7 +183,7 @@ namespace Jackett.Common.Indexers
if (catStr == "10") //change "Season #" to "S##" for TV shows
release.Title = Regex.Replace(release.Title, @"Season (\d+)",
m => string.Format("S{0:00}", Int32.Parse(m.Groups[1].Value)));
m => string.Format("S{0:00}", int.Parse(m.Groups[1].Value)));
releases.Add(release);
}

View File

@ -118,7 +118,7 @@ namespace Jackett.Common.Indexers
private string InternationalTitle(string title)
{
// Get international title if available, or use the full title if not
string cleanTitle = Regex.Replace(title, @".* \[(.*?)\](.*)", "$1$2");
var cleanTitle = Regex.Replace(title, @".* \[(.*?)\](.*)", "$1$2");
cleanTitle = Regex.Replace(cleanTitle, @"(?:.*)\/(.*)", "$1");
return cleanTitle.Trim();
@ -494,15 +494,15 @@ namespace Jackett.Common.Indexers
// Adjust the description in order to can be read by Radarr and Sonarr
var cleanDescription = release.Description.Trim().TrimStart('[').TrimEnd(']');
String[] titleElements;
string[] titleElements;
//Formats the title so it can be parsed later
string[] stringSeparators = new string[] { " / " };
var stringSeparators = new string[] { " / " };
titleElements = cleanDescription.Split(stringSeparators, StringSplitOptions.None);
// release.Title += string.Join(" ", titleElements);
release.Title = release.Title.Trim();
release.Title += " " + titleElements[5] + " " + titleElements[3] + " " + titleElements[1] + " " + titleElements[2] + " " + titleElements[4] + " " + String.Join(" ", titleElements.Skip(6).Take(titleElements.Length - 6).ToArray());
release.Title += " " + titleElements[5] + " " + titleElements[3] + " " + titleElements[1] + " " + titleElements[2] + " " + titleElements[4] + " " + string.Join(" ", titleElements.Skip(6).Take(titleElements.Length - 6).ToArray());
// This tracker does not provide an publish date to search terms (only on last 24h page)
release.PublishDate = DateTime.Today;

View File

@ -18,7 +18,7 @@ namespace Jackett.Common.Indexers
{
private string SearchUrl => SiteLink + "browse.php?only=0&hentai=1&incomplete=1&lossless=1&hd=1&multiaudio=1&bonus=1&reorder=1&q=";
private string LoginUrl => SiteLink + "login.php";
private string LogoutStr = "<a href=\"logout.php\">Logout</a>";
private readonly string LogoutStr = "<a href=\"logout.php\">Logout</a>";
private new ConfigurationDataBasicLogin configData
{

View File

@ -54,7 +54,7 @@ namespace Jackett.Common.Indexers
get { return configData.LastError.Value; }
set
{
bool SaveNeeded = configData.LastError.Value != value && IsConfigured;
var SaveNeeded = configData.LastError.Value != value && IsConfigured;
configData.LastError.Value = value;
if (SaveNeeded)
SaveConfig();
@ -100,7 +100,7 @@ namespace Jackett.Common.Indexers
protected void LoadLegacyCookieConfig(JToken jsonConfig)
{
string legacyCookieHeader = (string)jsonConfig["cookie_header"];
var legacyCookieHeader = (string)jsonConfig["cookie_header"];
if (!string.IsNullOrEmpty(legacyCookieHeader))
{
CookieHeader = legacyCookieHeader;
@ -113,7 +113,7 @@ namespace Jackett.Common.Indexers
{
var array = (JArray)jcookies;
legacyCookieHeader = string.Empty;
for (int i = 0; i < array.Count; i++)
for (var i = 0; i < array.Count; i++)
{
if (i != 0)
legacyCookieHeader += "; ";
@ -128,7 +128,7 @@ namespace Jackett.Common.Indexers
}
}
virtual public void LoadValuesFromJson(JToken jsonConfig, bool useProtectionService = false)
public virtual void LoadValuesFromJson(JToken jsonConfig, bool useProtectionService = false)
{
IProtectionService ps = null;
if (useProtectionService)
@ -165,7 +165,7 @@ namespace Jackett.Common.Indexers
}
}
// read and upgrade old settings file format
else if (jsonConfig is Object)
else if (jsonConfig is object)
{
LoadLegacyCookieConfig(jsonConfig);
SaveConfig();
@ -176,7 +176,7 @@ namespace Jackett.Common.Indexers
//TODO: Remove this section once users have moved off DPAPI
private bool MigratedFromDPAPI(JToken jsonConfig)
{
bool isWindows = Environment.OSVersion.Platform == PlatformID.Win32NT;
var isWindows = Environment.OSVersion.Platform == PlatformID.Win32NT;
if (!isWindows && DotNetCoreUtil.IsRunningOnDotNetCore)
{
@ -187,7 +187,7 @@ namespace Jackett.Common.Indexers
LoadValuesFromJson(jsonConfig, false);
StringItem passwordPropertyValue = null;
string passwordValue = "";
var passwordValue = "";
try
{
@ -233,7 +233,7 @@ namespace Jackett.Common.Indexers
try
{
string unprotectedPassword = protectionService.LegacyUnProtect(passwordValue);
var unprotectedPassword = protectionService.LegacyUnProtect(passwordValue);
//Password successfully unprotected using Windows/Mono DPAPI
passwordPropertyValue.Value = unprotectedPassword;
@ -362,8 +362,8 @@ namespace Jackett.Common.Indexers
protected BaseWebIndexer(string name, string link, string description, IIndexerConfigurationService configService, WebClient client, Logger logger, ConfigurationData configData, IProtectionService p, TorznabCapabilities caps = null, string downloadBase = null)
: base(name, link, description, configService, logger, configData, p)
{
this.webclient = client;
this.downloadUrlBase = downloadBase;
webclient = client;
downloadUrlBase = downloadBase;
if (caps == null)
caps = TorznabUtil.CreateDefaultTorznabTVCaps();
@ -374,10 +374,10 @@ namespace Jackett.Common.Indexers
protected BaseWebIndexer(IIndexerConfigurationService configService, WebClient client, Logger logger, IProtectionService p)
: base("", "/", "", configService, logger, null, p)
{
this.webclient = client;
webclient = client;
}
public async virtual Task<byte[]> Download(Uri link)
public virtual async Task<byte[]> Download(Uri link)
{
var uncleanLink = UncleanLink(link);
return await Download(uncleanLink, RequestType.GET);
@ -405,7 +405,7 @@ namespace Jackett.Common.Indexers
}
if (response.Status != System.Net.HttpStatusCode.OK && response.Status != System.Net.HttpStatusCode.Continue && response.Status != System.Net.HttpStatusCode.PartialContent)
{
logger.Error("Failed download cookies: " + this.CookieHeader);
logger.Error("Failed download cookies: " + CookieHeader);
if (response.Content != null)
logger.Error("Failed download response:\n" + Encoding.UTF8.GetString(response.Content));
throw new Exception($"Remote server returned {response.Status.ToString()}" + (response.IsRedirect ? " => " + response.RedirectingTo : ""));
@ -417,7 +417,7 @@ namespace Jackett.Common.Indexers
protected async Task<WebClientByteResult> RequestBytesWithCookiesAndRetry(string url, string cookieOverride = null, RequestType method = RequestType.GET, string referer = null, IEnumerable<KeyValuePair<string, string>> data = null)
{
Exception lastException = null;
for (int i = 0; i < 3; i++)
for (var i = 0; i < 3; i++)
{
try
{
@ -448,7 +448,7 @@ namespace Jackett.Common.Indexers
if (cookieOverride != null)
request.Cookies = cookieOverride;
WebClientStringResult result = await webclient.GetString(request);
var result = await webclient.GetString(request);
CheckTrackerDown(result);
UpdateCookieHeader(result.Cookies, cookieOverride);
return result;
@ -457,7 +457,7 @@ namespace Jackett.Common.Indexers
protected async Task<WebClientStringResult> RequestStringWithCookiesAndRetry(string url, string cookieOverride = null, string referer = null, Dictionary<string, string> headers = null)
{
Exception lastException = null;
for (int i = 0; i < 3; i++)
for (var i = 0; i < 3; i++)
{
try
{
@ -510,7 +510,7 @@ namespace Jackett.Common.Indexers
if (emulateBrowser.HasValue)
request.EmulateBrowser = emulateBrowser.Value;
WebClientStringResult result = await webclient.GetString(request);
var result = await webclient.GetString(request);
CheckTrackerDown(result);
UpdateCookieHeader(result.Cookies, cookieOverride);
return result;
@ -519,7 +519,7 @@ namespace Jackett.Common.Indexers
protected async Task<WebClientStringResult> PostDataWithCookiesAndRetry(string url, IEnumerable<KeyValuePair<string, string>> data, string cookieOverride = null, string referer = null, Dictionary<string, string> headers = null, string rawbody = null, bool? emulateBrowser = null)
{
Exception lastException = null;
for (int i = 0; i < 3; i++)
for (var i = 0; i < 3; i++)
{
try
{
@ -599,7 +599,7 @@ namespace Jackett.Common.Indexers
protected async Task FollowIfRedirect(WebClientByteResult response, string referrer = null, string overrideRedirectUrl = null, string overrideCookies = null, bool accumulateCookies = false)
{
// Follow up to 5 redirects
for (int i = 0; i < 5; i++)
for (var i = 0; i < 5; i++)
{
if (!response.IsRedirect)
break;
@ -616,11 +616,11 @@ namespace Jackett.Common.Indexers
}
}
private String ResolveCookies(String incomingCookies = "")
private string ResolveCookies(string incomingCookies = "")
{
var redirRequestCookies = (CookieHeader != null && CookieHeader != "" ? CookieHeader + " " : "") + incomingCookies;
System.Text.RegularExpressions.Regex expression = new System.Text.RegularExpressions.Regex(@"([^\\,;\s]+)=([^=\\,;\s]*)");
Dictionary<string, string> cookieDIctionary = new Dictionary<string, string>();
var expression = new System.Text.RegularExpressions.Regex(@"([^\\,;\s]+)=([^=\\,;\s]*)");
var cookieDIctionary = new Dictionary<string, string>();
var matches = expression.Match(redirRequestCookies);
while (matches.Success)
{
@ -636,7 +636,7 @@ namespace Jackett.Common.Indexers
// Update CookieHeader with new cookies and save the config if something changed (e.g. a new CloudFlare clearance cookie was issued)
protected virtual void UpdateCookieHeader(string newCookies, string cookieOverride = null)
{
string newCookieHeader = ResolveCookies((cookieOverride != null && cookieOverride != "" ? cookieOverride + " " : "") + newCookies);
var newCookieHeader = ResolveCookies((cookieOverride != null && cookieOverride != "" ? cookieOverride + " " : "") + newCookies);
if (CookieHeader != newCookieHeader)
{
logger.Debug(string.Format("updating Cookies {0} => {1}", CookieHeader, newCookieHeader));
@ -851,7 +851,7 @@ namespace Jackett.Common.Indexers
public override TorznabCapabilities TorznabCaps { get; protected set; }
private List<CategoryMapping> categoryMapping = new List<CategoryMapping>();
private readonly List<CategoryMapping> categoryMapping = new List<CategoryMapping>();
protected WebClient webclient;
protected readonly string downloadUrlBase = "";
}

View File

@ -19,7 +19,7 @@ namespace Jackett.Common.Indexers
{
private string LoginUrl { get { return SiteLink + "login/index.php"; } }
private string BrowseUrl { get { return SiteLink + "uebersicht.php"; } }
private TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "W. Europe Standard Time", "W. Europe Standard Time");
private readonly TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "W. Europe Standard Time", "W. Europe Standard Time");
private new ConfigurationDataBasicLoginWithRSSAndDisplay configData
{
@ -42,8 +42,8 @@ namespace Jackett.Common.Indexers
Language = "de-de";
Type = "private";
this.configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show a reasonable amount (it looks like there's no maximum).";
this.configData.DisplayText.Name = "Notice";
configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show a reasonable amount (it looks like there's no maximum).";
configData.DisplayText.Name = "Notice";
AddCategoryMapping(1, TorznabCatType.Other); // Anderes
AddCategoryMapping(2, TorznabCatType.TVAnime); // Anime
@ -144,7 +144,7 @@ namespace Jackett.Common.Indexers
var qRow = row.Cq();
var flagImgs = qRow.Find("table tbody tr: eq(0) td > img");
List<string> flags = new List<string>();
var flags = new List<string>();
flagImgs.Each(flagImg =>
{
var flag = flagImg.GetAttribute("src").Replace("pic/torrent_", "").Replace(".gif", "").ToUpper();
@ -163,12 +163,12 @@ namespace Jackett.Common.Indexers
if (!query.MatchQueryStringAND(release.Title))
continue;
release.Description = String.Join(", ", flags);
release.Description = string.Join(", ", flags);
release.Guid = release.Link;
var dateStr = qRow.Find("table tbody tr:eq(1) td:eq(4)").Html().Replace("&nbsp;", " ").Trim();
var dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "dd.MM.yyyy HH:mm:ss", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
DateTime pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
var pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
release.PublishDate = pubDateUtc.ToLocalTime();
var sizeStr = qRow.Find("table tbody tr:eq(1) td b").First().Text().Trim();

View File

@ -60,8 +60,8 @@ namespace Jackett.Common.Indexers
{
var loginPage = await RequestStringWithCookies(LoginUrl, configData.CookieHeader.Value);
CQ cq = loginPage.Content;
string recaptchaSiteKey = cq.Find(".g-recaptcha").Attr("data-sitekey");
var result = this.configData;
var recaptchaSiteKey = cq.Find(".g-recaptcha").Attr("data-sitekey");
var result = configData;
result.CookieHeader.Value = loginPage.Cookies;
result.Captcha.SiteKey = recaptchaSiteKey;
result.Captcha.Version = "2";

View File

@ -18,7 +18,7 @@ namespace Jackett.Common.Indexers
public class BroadcastTheNet : BaseWebIndexer
{
// Docs at http://apidocs.broadcasthe.net/docs.php
private string APIBASE = "https://api.broadcasthe.net";
private readonly string APIBASE = "https://api.broadcasthe.net";
private new ConfigurationDataAPIKey configData
{

View File

@ -33,9 +33,9 @@ namespace Jackett.Common.Indexers
protected List<string> DefaultCategories = new List<string>();
new ConfigurationData configData
private new ConfigurationData configData
{
get { return (ConfigurationData)base.configData; }
get { return base.configData; }
set { base.configData = value; }
}
@ -196,10 +196,10 @@ namespace Jackett.Common.Indexers
protected Dictionary<string, object> getTemplateVariablesFromConfigData()
{
Dictionary<string, object> variables = new Dictionary<string, object>();
var variables = new Dictionary<string, object>();
variables[".Config.sitelink"] = SiteLink;
foreach (settingsField Setting in Definition.Settings)
foreach (var Setting in Definition.Settings)
{
string value;
var item = configData.GetDynamic(Setting.Name);
@ -232,17 +232,17 @@ namespace Jackett.Common.Indexers
// handle re_replace expression
// Example: {{ re_replace .Query.Keywords "[^a-zA-Z0-9]+" "%" }}
Regex ReReplaceRegex = new Regex(@"{{\s*re_replace\s+(\..+?)\s+""(.*?)""\s+""(.*?)""\s*}}");
var ReReplaceRegex = new Regex(@"{{\s*re_replace\s+(\..+?)\s+""(.*?)""\s+""(.*?)""\s*}}");
var ReReplaceRegexMatches = ReReplaceRegex.Match(template);
while (ReReplaceRegexMatches.Success)
{
string all = ReReplaceRegexMatches.Groups[0].Value;
string variable = ReReplaceRegexMatches.Groups[1].Value;
string regexp = ReReplaceRegexMatches.Groups[2].Value;
string newvalue = ReReplaceRegexMatches.Groups[3].Value;
var all = ReReplaceRegexMatches.Groups[0].Value;
var variable = ReReplaceRegexMatches.Groups[1].Value;
var regexp = ReReplaceRegexMatches.Groups[2].Value;
var newvalue = ReReplaceRegexMatches.Groups[3].Value;
Regex ReplaceRegex = new Regex(regexp);
var ReplaceRegex = new Regex(regexp);
var input = (string)variables[variable];
var expanded = ReplaceRegex.Replace(input, newvalue);
@ -255,14 +255,14 @@ namespace Jackett.Common.Indexers
// handle join expression
// Example: {{ join .Categories "," }}
Regex JoinRegex = new Regex(@"{{\s*join\s+(\..+?)\s+""(.*?)""\s*}}");
var JoinRegex = new Regex(@"{{\s*join\s+(\..+?)\s+""(.*?)""\s*}}");
var JoinMatches = JoinRegex.Match(template);
while (JoinMatches.Success)
{
string all = JoinMatches.Groups[0].Value;
string variable = JoinMatches.Groups[1].Value;
string delimiter = JoinMatches.Groups[2].Value;
var all = JoinMatches.Groups[0].Value;
var variable = JoinMatches.Groups[1].Value;
var delimiter = JoinMatches.Groups[2].Value;
var input = (ICollection<string>)variables[variable];
var expanded = string.Join(delimiter, input);
@ -275,17 +275,17 @@ namespace Jackett.Common.Indexers
}
// handle or, and functions
Regex AndOrRegex = new Regex(@"(and|or)\s+\((\..+?)\)\s+\((\..+?)\)(\s+\((\..+?)\)){0,1}");
var AndOrRegex = new Regex(@"(and|or)\s+\((\..+?)\)\s+\((\..+?)\)(\s+\((\..+?)\)){0,1}");
var AndOrRegexMatches = AndOrRegex.Match(template);
while (AndOrRegexMatches.Success)
{
string functionResult = "";
string all = AndOrRegexMatches.Groups[0].Value;
string op = AndOrRegexMatches.Groups[1].Value;
string first = AndOrRegexMatches.Groups[2].Value;
string second = AndOrRegexMatches.Groups[3].Value;
string third = "";
var functionResult = "";
var all = AndOrRegexMatches.Groups[0].Value;
var op = AndOrRegexMatches.Groups[1].Value;
var first = AndOrRegexMatches.Groups[2].Value;
var second = AndOrRegexMatches.Groups[3].Value;
var third = "";
if (AndOrRegexMatches.Groups.Count > 5)
{
third = AndOrRegexMatches.Groups[5].Value;
@ -334,17 +334,17 @@ namespace Jackett.Common.Indexers
}
// handle if ... else ... expression
Regex IfElseRegex = new Regex(@"{{\s*if\s*(.+?)\s*}}(.*?){{\s*else\s*}}(.*?){{\s*end\s*}}");
var IfElseRegex = new Regex(@"{{\s*if\s*(.+?)\s*}}(.*?){{\s*else\s*}}(.*?){{\s*end\s*}}");
var IfElseRegexMatches = IfElseRegex.Match(template);
while (IfElseRegexMatches.Success)
{
string conditionResult = null;
string all = IfElseRegexMatches.Groups[0].Value;
string condition = IfElseRegexMatches.Groups[1].Value;
string onTrue = IfElseRegexMatches.Groups[2].Value;
string onFalse = IfElseRegexMatches.Groups[3].Value;
var all = IfElseRegexMatches.Groups[0].Value;
var condition = IfElseRegexMatches.Groups[1].Value;
var onTrue = IfElseRegexMatches.Groups[2].Value;
var onFalse = IfElseRegexMatches.Groups[3].Value;
if (condition.StartsWith("."))
{
@ -378,19 +378,19 @@ namespace Jackett.Common.Indexers
}
// handle range expression
Regex RangeRegex = new Regex(@"{{\s*range\s*(.+?)\s*}}(.*?){{\.}}(.*?){{end}}");
var RangeRegex = new Regex(@"{{\s*range\s*(.+?)\s*}}(.*?){{\.}}(.*?){{end}}");
var RangeRegexMatches = RangeRegex.Match(template);
while (RangeRegexMatches.Success)
{
string expanded = string.Empty;
var expanded = string.Empty;
string all = RangeRegexMatches.Groups[0].Value;
string variable = RangeRegexMatches.Groups[1].Value;
string prefix = RangeRegexMatches.Groups[2].Value;
string postfix = RangeRegexMatches.Groups[3].Value;
var all = RangeRegexMatches.Groups[0].Value;
var variable = RangeRegexMatches.Groups[1].Value;
var prefix = RangeRegexMatches.Groups[2].Value;
var postfix = RangeRegexMatches.Groups[3].Value;
foreach (string value in (ICollection<string>)variables[variable])
foreach (var value in (ICollection<string>)variables[variable])
{
var newvalue = value;
if (modifier != null)
@ -402,17 +402,17 @@ namespace Jackett.Common.Indexers
}
// handle simple variables
Regex VariablesRegEx = new Regex(@"{{\s*(\..+?)\s*}}");
var VariablesRegEx = new Regex(@"{{\s*(\..+?)\s*}}");
var VariablesRegExMatches = VariablesRegEx.Match(template);
while (VariablesRegExMatches.Success)
{
string expanded = string.Empty;
var expanded = string.Empty;
string all = VariablesRegExMatches.Groups[0].Value;
string variable = VariablesRegExMatches.Groups[1].Value;
var all = VariablesRegExMatches.Groups[0].Value;
var variable = VariablesRegExMatches.Groups[1].Value;
string value = (string)variables[variable];
var value = (string)variables[variable];
if (modifier != null)
value = modifier(value);
template = template.Replace(all, value);
@ -432,12 +432,12 @@ namespace Jackett.Common.Indexers
var ResultParser = new HtmlParser();
var ResultDocument = ResultParser.ParseDocument(loginResult.Content);
foreach (errorBlock error in errorBlocks)
foreach (var error in errorBlocks)
{
var selection = ResultDocument.QuerySelector(error.Selector);
if (selection != null)
{
string errorMessage = selection.TextContent;
var errorMessage = selection.TextContent;
if (error.Message != null)
{
errorMessage = handleSelector(error.Message, ResultDocument.FirstElementChild);
@ -858,7 +858,7 @@ namespace Jackett.Common.Indexers
configData.CookieHeader.Value = null;
if (Login.Cookies != null)
configData.CookieHeader.Value = String.Join("; ", Login.Cookies);
configData.CookieHeader.Value = string.Join("; ", Login.Cookies);
landingResult = await RequestStringWithCookies(LoginUrl.AbsoluteUri, null, SiteLink);
var htmlParser = new HtmlParser();
@ -965,7 +965,7 @@ namespace Jackett.Common.Indexers
if (Filters == null)
return Data;
foreach (filterBlock Filter in Filters)
foreach (var Filter in Filters)
{
switch (Filter.Name)
{
@ -996,7 +996,7 @@ namespace Jackett.Common.Indexers
var regexpreplace_pattern = (string)Filter.Args[0];
var regexpreplace_replacement = (string)Filter.Args[1];
regexpreplace_replacement = applyGoTemplateText(regexpreplace_replacement, variables);
Regex regexpreplace_regex = new Regex(regexpreplace_pattern);
var regexpreplace_regex = new Regex(regexpreplace_pattern);
Data = regexpreplace_regex.Replace(Data, regexpreplace_replacement);
break;
case "split":
@ -1059,12 +1059,12 @@ namespace Jackett.Common.Indexers
{
// Should replace diacritics charcaters with their base character
// It's not perfect, e.g. "ŠĐĆŽ - šđčćž" becomes "SĐCZ-sđccz"
string stFormD = Data.Normalize(NormalizationForm.FormD);
int len = stFormD.Length;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < len; i++)
var stFormD = Data.Normalize(NormalizationForm.FormD);
var len = stFormD.Length;
var sb = new StringBuilder();
for (var i = 0; i < len; i++)
{
System.Globalization.UnicodeCategory uc = System.Globalization.CharUnicodeInfo.GetUnicodeCategory(stFormD[i]);
var uc = System.Globalization.CharUnicodeInfo.GetUnicodeCategory(stFormD[i]);
if (uc != System.Globalization.UnicodeCategory.NonSpacingMark)
{
sb.Append(stFormD[i]);
@ -1121,7 +1121,7 @@ namespace Jackett.Common.Indexers
return applyFilters(applyGoTemplateText(Selector.Text, variables), Selector.Filters, variables);
}
IElement selection = Dom;
var selection = Dom;
string value = null;
if (Selector.Selector != null)
@ -1183,7 +1183,7 @@ namespace Jackett.Common.Indexers
{
var releases = new List<ReleaseInfo>();
searchBlock Search = Definition.Search;
var Search = Definition.Search;
// init template context
var variables = getTemplateVariablesFromConfigData();
@ -1216,7 +1216,7 @@ namespace Jackett.Common.Indexers
var mappedCategories = MapTorznabCapsToTrackers(query);
if (mappedCategories.Count == 0)
{
mappedCategories = this.DefaultCategories;
mappedCategories = DefaultCategories;
}
variables[".Categories"] = mappedCategories;
@ -1254,9 +1254,9 @@ namespace Jackett.Common.Indexers
// HttpUtility.UrlPathEncode seems to only encode spaces, we use UrlEncode and replace + with %20 as a workaround
var searchUrl = resolvePath(applyGoTemplateText(SearchPath.Path, variables, WebUtility.UrlEncode).Replace("+", "%20")).AbsoluteUri;
var queryCollection = new List<KeyValuePair<string, string>>();
RequestType method = RequestType.GET;
var method = RequestType.GET;
if (String.Equals(SearchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
if (string.Equals(SearchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = RequestType.POST;
}
@ -1275,7 +1275,7 @@ namespace Jackett.Common.Indexers
if (Input.Key == "$raw")
{
var rawStr = applyGoTemplateText(Input.Value, variables, WebUtility.UrlEncode);
foreach (string part in rawStr.Split('&'))
foreach (var part in rawStr.Split('&'))
{
var parts = part.Split(new char[] { '=' }, 2);
var key = parts[0];
@ -1358,7 +1358,7 @@ namespace Jackett.Common.Indexers
var rowsSelector = applyGoTemplateText(Search.Rows.Selector, variables);
var RowsDom = SearchResultDocument.QuerySelectorAll(rowsSelector);
List<IElement> Rows = new List<IElement>();
var Rows = new List<IElement>();
foreach (var RowDom in RowsDom)
{
Rows.Add(RowDom);
@ -1368,14 +1368,14 @@ namespace Jackett.Common.Indexers
var After = Definition.Search.Rows.After;
if (After > 0)
{
for (int i = 0; i < Rows.Count; i += 1)
for (var i = 0; i < Rows.Count; i += 1)
{
var CurrentRow = Rows[i];
for (int j = 0; j < After; j += 1)
for (var j = 0; j < After; j += 1)
{
var MergeRowIndex = i + j + 1;
var MergeRow = Rows[MergeRowIndex];
List<INode> MergeNodes = new List<INode>();
var MergeNodes = new List<INode>();
foreach (var node in MergeRow.ChildNodes)
{
MergeNodes.Add(node);
@ -1535,14 +1535,14 @@ namespace Jackett.Common.Indexers
value = release.Imdb.ToString();
break;
case "rageid":
Regex RageIDRegEx = new Regex(@"(\d+)", RegexOptions.Compiled);
var RageIDRegEx = new Regex(@"(\d+)", RegexOptions.Compiled);
var RageIDMatch = RageIDRegEx.Match(value);
var RageID = RageIDMatch.Groups[1].Value;
release.RageID = ParseUtil.CoerceLong(RageID);
value = release.RageID.ToString();
break;
case "tvdbid":
Regex TVDBIdRegEx = new Regex(@"(\d+)", RegexOptions.Compiled);
var TVDBIdRegEx = new Regex(@"(\d+)", RegexOptions.Compiled);
var TVDBIdMatch = TVDBIdRegEx.Match(value);
var TVDBId = TVDBIdMatch.Groups[1].Value;
release.TVDBId = ParseUtil.CoerceLong(TVDBId);
@ -1575,12 +1575,12 @@ namespace Jackett.Common.Indexers
var SkipRelease = false;
if (Filters != null)
{
foreach (filterBlock Filter in Filters)
foreach (var Filter in Filters)
{
switch (Filter.Name)
{
case "andmatch":
int CharacterLimit = -1;
var CharacterLimit = -1;
if (Filter.Args != null)
CharacterLimit = int.Parse(Filter.Args);
@ -1672,8 +1672,8 @@ namespace Jackett.Common.Indexers
Dictionary<string, string> pairs = null;
var queryCollection = new NameValueCollection();
RequestType method = RequestType.GET;
if (String.Equals(request.Method, "post", StringComparison.OrdinalIgnoreCase))
var method = RequestType.GET;
if (string.Equals(request.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = RequestType.POST;
pairs = new Dictionary<string, string>();
@ -1711,7 +1711,7 @@ namespace Jackett.Common.Indexers
variables[prefix + ".PathAndQuery"] = uri.PathAndQuery;
variables[prefix + ".Query"] = uri.Query;
var queryString = QueryHelpers.ParseQuery(uri.Query);
foreach (string key in queryString.Keys)
foreach (var key in queryString.Keys)
{
//If we have supplied the same query string multiple time, just take the first.
variables[prefix + ".Query." + key] = queryString[key].First();

View File

@ -51,7 +51,7 @@ namespace Jackett.Common.Indexers
return "%";
}
var searchString = query.GetQueryString();
Regex ReplaceRegex = new Regex("[^a-zA-Z0-9]+");
var ReplaceRegex = new Regex("[^a-zA-Z0-9]+");
searchString = ReplaceRegex.Replace(searchString, "%");
return searchString;
}

View File

@ -91,10 +91,10 @@ namespace Jackett.Common.Indexers
{
var loginPage = await RequestStringWithCookies(LoginUrl, configData.CookieHeader.Value);
CQ cq = loginPage.Content;
string recaptchaSiteKey = cq.Find(".g-recaptcha").Attr("data-sitekey");
var recaptchaSiteKey = cq.Find(".g-recaptcha").Attr("data-sitekey");
if (recaptchaSiteKey != null)
{
var result = this.configData;
var result = configData;
result.CookieHeader.Value = loginPage.Cookies;
result.Captcha.SiteKey = recaptchaSiteKey;
result.Captcha.Version = "2";
@ -158,7 +158,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var queryCollection = new NameValueCollection();
var searchString = query.GetQueryString();
@ -197,7 +197,7 @@ namespace Jackett.Common.Indexers
private IEnumerable<ReleaseInfo> contentToReleaseInfos(TorznabQuery query, CQ dom)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
// Doesn't handle pagination yet...
var rows = dom["div.panel-body > table.table > tbody > tr"];

View File

@ -266,7 +266,7 @@ namespace Jackett.Common.Indexers
//issue #5064 replace multi keyword
if (!string.IsNullOrEmpty(ReplaceMulti))
{
System.Text.RegularExpressions.Regex regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
release.Title = regex.Replace(release.Title, "$1" + ReplaceMulti + "$2");
}
// issue #6855 Replace VOSTFR with ENGLISH

View File

@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
@ -36,12 +35,12 @@ namespace Jackett.Common.Indexers.Feeds
Link = item.FirstValue("link").ToUri(),
Comments = item.FirstValue("comments").ToUri(),
PublishDate = item.FirstValue("pubDate").ToDateTime(),
Category = new List<int> { Int32.Parse(attributes.First(e => e.Attribute("name").Value == "category").Attribute("value").Value) },
Size = ReadAttribute(attributes, "size").TryParse<Int64>(),
Files = ReadAttribute(attributes, "files").TryParse<Int64>(),
Category = new List<int> { int.Parse(attributes.First(e => e.Attribute("name").Value == "category").Attribute("value").Value) },
Size = ReadAttribute(attributes, "size").TryParse<long>(),
Files = ReadAttribute(attributes, "files").TryParse<long>(),
Description = item.FirstValue("description"),
Seeders = ReadAttribute(attributes, "seeders").TryParse<Int32>(),
Peers = ReadAttribute(attributes, "peers").TryParse<Int32>(),
Seeders = ReadAttribute(attributes, "seeders").TryParse<int>(),
Peers = ReadAttribute(attributes, "peers").TryParse<int>(),
InfoHash = attributes.First(e => e.Attribute("name").Value == "infohash").Attribute("value").Value,
MagnetUri = attributes.First(e => e.Attribute("name").Value == "magneturl").Attribute("value").Value.ToUri(),
};

View File

@ -106,7 +106,7 @@ namespace Jackett.Common.Indexers
var searchString = query.GetQueryString();
var cats = MapTorznabCapsToTrackers(query);
string cat = "0";
var cat = "0";
if (cats.Count == 1)
{
cat = cats[0];

View File

@ -74,7 +74,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var searchString = query.GetQueryString();
var searchUrl = SearchUrl;
@ -88,7 +88,7 @@ namespace Jackett.Common.Indexers
}
var cats = MapTorznabCapsToTrackers(query);
string cat = "0";
var cat = "0";
if (cats.Count == 1)
{
cat = cats[0];

View File

@ -107,7 +107,7 @@ namespace Jackett.Common.Indexers
var captcha = cq.Find(".g-recaptcha"); // invisible recaptcha
if (captcha.Any())
{
var result = this.configData;
var result = configData;
result.CookieHeader.Value = loginPage.Cookies;
result.Captcha.SiteKey = captcha.Attr("data-sitekey");
result.Captcha.Version = "2";
@ -232,7 +232,7 @@ namespace Jackett.Common.Indexers
var rows = dom["tr.box_torrent"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var qRow = row.Cq();
var release = new ReleaseInfo();
var main_title_link = qRow.Find("div.main_title > a");
@ -243,11 +243,10 @@ namespace Jackett.Common.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800; // 48 hours
int seeders, peers;
if (ParseUtil.TryCoerceInt(qRow.Find("td:nth-child(7) > div").Text(), out seeders))
if (ParseUtil.TryCoerceInt(qRow.Find("td:nth-child(7) > div").Text(), out var seeders))
{
release.Seeders = seeders;
if (ParseUtil.TryCoerceInt(qRow.Find("td:nth-child(8) > div").Text(), out peers))
if (ParseUtil.TryCoerceInt(qRow.Find("td:nth-child(8) > div").Text(), out var peers))
{
release.Peers = peers + release.Seeders;
}
@ -255,7 +254,7 @@ namespace Jackett.Common.Indexers
release.Grabs = ParseUtil.CoerceLong(qRow.Find("td:nth-child(5)").Text().Replace(",", ""));
release.Seeders = ParseUtil.CoerceInt(qRow.Find("td:nth-child(6)").Text().Replace(",", ""));
release.Peers = ParseUtil.CoerceInt(qRow.Find("td:nth-child(7)").Text().Replace(",", "")) + release.Seeders;
string fullSize = qRow.Find("td:nth-child(4)").Text();
var fullSize = qRow.Find("td:nth-child(4)").Text();
release.Size = ReleaseInfo.GetBytes(fullSize);
release.Comments = new Uri(SiteLink + qRow.Find("a.threadlink[href]").Attr("href"));
@ -272,10 +271,10 @@ namespace Jackett.Common.Indexers
var dateStringAll = qRow.Find("div.up_info2")[0].ChildNodes.Last().ToString();
var dateParts = dateStringAll.Split(' ');
string dateString = dateParts[dateParts.Length - 2] + " " + dateParts[dateParts.Length - 1];
var dateString = dateParts[dateParts.Length - 2] + " " + dateParts[dateParts.Length - 1];
release.PublishDate = DateTime.ParseExact(dateString, "dd/MM/yy HH:mm", CultureInfo.InvariantCulture);
string categoryLink = qRow.Find("a[href^=\"/browse.php?cat=\"]").Attr("href");
var categoryLink = qRow.Find("a[href^=\"/browse.php?cat=\"]").Attr("href");
var catid = ParseUtil.GetArgumentFromQueryString(categoryLink, "cat");
release.Category = MapTrackerCatToNewznab(catid);
@ -313,7 +312,7 @@ namespace Jackett.Common.Indexers
CQ dom = results.Content;
int rowCount = 0;
var rowCount = 0;
var rows = dom["#listtable > tbody > tr"];
foreach (var row in rows)
@ -324,8 +323,8 @@ namespace Jackett.Common.Indexers
continue;
}
CQ qRow = row.Cq();
CQ link = qRow.Find("td:nth-child(1) > a");
var qRow = row.Cq();
var link = qRow.Find("td:nth-child(1) > a");
if (link.Text().Trim().ToLower() == searchTerm.Trim().ToLower())
{
var address = link.Attr("href");

View File

@ -230,13 +230,13 @@ namespace Jackett.Common.Indexers
try
{
string RowsSelector = ".torrent_table > tbody > tr";
var RowsSelector = ".torrent_table > tbody > tr";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);
var Rows = SearchResultDocument.QuerySelectorAll(RowsSelector);
bool stickyGroup = false;
var stickyGroup = false;
string CategoryStr;
ICollection<int> GroupCategory = null;
string GroupTitle = null;

View File

@ -75,7 +75,7 @@ namespace Jackett.Common.Indexers
{
dynamic requestData = new JObject();
var queryString = query.GetQueryString();
int? imdbId = ParseUtil.GetImdbID(query.ImdbID);
var imdbId = ParseUtil.GetImdbID(query.ImdbID);
if (imdbId != null)
{

View File

@ -131,7 +131,7 @@ namespace Jackett.Common.Indexers
response = await PostDataWithCookies(SearchUrl, pairs, configData.CookieHeader.Value, SiteLink, headers, body);
}
List<ReleaseInfo> releases = ParseResponse(query, response, includePremium);
var releases = ParseResponse(query, response, includePremium);
return releases;
}
@ -145,7 +145,7 @@ namespace Jackett.Common.Indexers
private List<ReleaseInfo> ParseResponse(TorznabQuery query, WebClientStringResult response, bool includePremium)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var torrents = CheckResponse(response);

View File

@ -92,7 +92,7 @@ namespace Jackett.Common.Indexers
{
var errorStr = "You have {0} remaining login attempts";
var remainingAttemptSpan = new Regex(string.Format(errorStr, "(.*?)")).Match(loginPage.Content).Groups[1].ToString();
var attempts = Regex.Replace(remainingAttemptSpan, "<.*?>", String.Empty);
var attempts = Regex.Replace(remainingAttemptSpan, "<.*?>", string.Empty);
var errorMessage = string.Format(errorStr, attempts);
throw new ExceptionWithConfigData(errorMessage, configData);
});
@ -131,7 +131,7 @@ namespace Jackett.Common.Indexers
if (prev == null || prev.NodeName.ToLowerInvariant() != "style")
continue;
CQ qRow = row.Cq();
var qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;

View File

@ -133,22 +133,22 @@ namespace Jackett.Common.Indexers
CQ dom = results.Content;
ReleaseInfo release;
CQ userInfo = dom[".mainmenu > table > tbody > tr:has(td[title=\"Active-Torrents\"])"][0].Cq();
string rank = userInfo.Find("td:nth-child(2)").Text().Substring(6);
var userInfo = dom[".mainmenu > table > tbody > tr:has(td[title=\"Active-Torrents\"])"][0].Cq();
var rank = userInfo.Find("td:nth-child(2)").Text().Substring(6);
HashSet<string> freeleechRanks = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var freeleechRanks = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
freeleechRanks.Add("VIP");
freeleechRanks.Add("Uploader");
freeleechRanks.Add("HD Internal");
freeleechRanks.Add("Moderator");
freeleechRanks.Add("Administrator");
freeleechRanks.Add("Owner");
bool hasFreeleech = freeleechRanks.Contains(rank);
var hasFreeleech = freeleechRanks.Contains(rank);
var rows = dom[".mainblockcontenttt > tbody > tr:has(a[href^=\"details.php?id=\"])"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var qRow = row.Cq();
release = new ReleaseInfo();
@ -158,7 +158,7 @@ namespace Jackett.Common.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800; // 48 hours
int tdIndex = 0;
var tdIndex = 0;
if (qRow.Find("td:nth-last-child(1)").Text() == "Edit")
tdIndex = 1;
// moderators get additional delete, recomend and like links
@ -166,33 +166,33 @@ namespace Jackett.Common.Indexers
tdIndex = 4;
// Sometimes the uploader column is missing
if (ParseUtil.TryCoerceInt(qRow.Find($"td:nth-last-child({tdIndex + 3})").Text(), out int seeders))
if (ParseUtil.TryCoerceInt(qRow.Find($"td:nth-last-child({tdIndex + 3})").Text(), out var seeders))
{
release.Seeders = seeders;
if (ParseUtil.TryCoerceInt(qRow.Find($"td:nth-last-child({tdIndex + 2})").Text(), out int peers))
if (ParseUtil.TryCoerceInt(qRow.Find($"td:nth-last-child({tdIndex + 2})").Text(), out var peers))
{
release.Peers = peers + release.Seeders;
}
}
// Sometimes the grabs column is missing
if (ParseUtil.TryCoerceLong(qRow.Find($"td:nth-last-child({tdIndex + 1})").Text(), out long grabs))
if (ParseUtil.TryCoerceLong(qRow.Find($"td:nth-last-child({tdIndex + 1})").Text(), out var grabs))
{
release.Grabs = grabs;
}
string fullSize = qRow.Find("td.mainblockcontent").Get(6).InnerText;
var fullSize = qRow.Find("td.mainblockcontent").Get(6).InnerText;
release.Size = ReleaseInfo.GetBytes(fullSize);
release.Guid = new Uri(SiteLink + qRow.Find("td.mainblockcontent b a").Attr("href"));
release.Link = new Uri(SiteLink + qRow.Find("td.mainblockcontent").Get(3).FirstChild.GetAttribute("href"));
release.Comments = new Uri(SiteLink + qRow.Find("td.mainblockcontent b a").Attr("href"));
string[] dateSplit = qRow.Find("td.mainblockcontent").Get(5).InnerHTML.Split(',');
string dateString = dateSplit[1].Substring(0, dateSplit[1].IndexOf('>')).Trim();
var dateSplit = qRow.Find("td.mainblockcontent").Get(5).InnerHTML.Split(',');
var dateString = dateSplit[1].Substring(0, dateSplit[1].IndexOf('>')).Trim();
release.PublishDate = DateTime.ParseExact(dateString, "dd MMM yyyy HH:mm:ss zz00", CultureInfo.InvariantCulture).ToLocalTime();
string category = qRow.Find("td:eq(0) a").Attr("href").Replace("torrents.php?category=", "");
var category = qRow.Find("td:eq(0) a").Attr("href").Replace("torrents.php?category=", "");
release.Category = MapTrackerCatToNewznab(category);
release.UploadVolumeFactor = 1;

View File

@ -102,7 +102,7 @@ namespace Jackett.Common.Indexers
{
CQ dom = response.Content;
CQ qRows = dom[".browse > div > div"];
var qRows = dom[".browse > div > div"];
foreach (var row in qRows)
{

View File

@ -17,7 +17,7 @@ using NLog;
namespace Jackett.Common.Indexers
{
class HorribleSubs : BaseWebIndexer
internal class HorribleSubs : BaseWebIndexer
{
private string ApiEndpoint { get { return SiteLink + "api.php"; } }
@ -153,13 +153,13 @@ namespace Jackett.Common.Indexers
var showPageResponse = await RequestStringWithCookiesAndRetry(ResultURL, string.Empty);
await FollowIfRedirect(showPageResponse);
Match match = Regex.Match(showPageResponse.Content, "(var hs_showid = )([0-9]*)(;)", RegexOptions.IgnoreCase);
var match = Regex.Match(showPageResponse.Content, "(var hs_showid = )([0-9]*)(;)", RegexOptions.IgnoreCase);
if (match.Success == false)
{
return releases;
}
int ShowID = int.Parse(match.Groups[2].Value);
var ShowID = int.Parse(match.Groups[2].Value);
var apiUrls = new string[] {
ApiEndpoint + "?method=getshows&type=batch&showid=" + ShowID, //https://horriblesubs.info/api.php?method=getshows&type=batch&showid=1194
@ -167,9 +167,9 @@ namespace Jackett.Common.Indexers
};
var releaserows = new List<AngleSharp.Dom.IElement>();
foreach (string apiUrl in apiUrls)
foreach (var apiUrl in apiUrls)
{
int nextId = 0;
var nextId = 0;
while (true)
{
var showAPIResponse = await RequestStringWithCookiesAndRetry(apiUrl + "&nextid=" + nextId, string.Empty);
@ -187,8 +187,8 @@ namespace Jackett.Common.Indexers
foreach (var releaserow in releaserows)
{
string dateStr = releaserow.QuerySelector(".rls-date").TextContent.Trim();
string title = releaserow.FirstChild.TextContent;
var dateStr = releaserow.QuerySelector(".rls-date").TextContent.Trim();
var title = releaserow.FirstChild.TextContent;
title = title.Replace("SD720p1080p", "");
title = title.Replace(dateStr, "");

View File

@ -19,21 +19,25 @@ using NLog;
namespace Jackett.Common.Indexers
{
class LostFilm : BaseWebIndexer
internal class LostFilm : BaseWebIndexer
{
private static Regex parsePlayEpisodeRegex = new Regex("PlayEpisode\\('(?<id>\\d{1,3})(?<season>\\d{3})(?<episode>\\d{3})'\\)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static Regex parseReleaseDetailsRegex = new Regex("Видео:\\ (?<quality>.+).\\ Размер:\\ (?<size>.+).\\ Перевод", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex parsePlayEpisodeRegex = new Regex("PlayEpisode\\('(?<id>\\d{1,3})(?<season>\\d{3})(?<episode>\\d{3})'\\)", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private static readonly Regex parseReleaseDetailsRegex = new Regex("Видео:\\ (?<quality>.+).\\ Размер:\\ (?<size>.+).\\ Перевод", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private string LoginUrl { get { return SiteLink + "login"; } }
string LoginUrl { get { return SiteLink + "login"; } }
// http://www.lostfilm.tv/login
string ApiUrl { get { return SiteLink + "ajaxik.php"; } }
private string ApiUrl { get { return SiteLink + "ajaxik.php"; } }
// http://www.lostfilm.tv/new
string DiscoveryUrl { get { return SiteLink + "new"; } }
private string DiscoveryUrl { get { return SiteLink + "new"; } }
// http://www.lostfilm.tv/search?q=breaking+bad
string SearchUrl { get { return SiteLink + "search"; } }
private string SearchUrl { get { return SiteLink + "search"; } }
// PlayEpisode function produce urls like this:
// https://www.lostfilm.tv/v_search.php?c=119&s=5&e=16
string ReleaseUrl { get { return SiteLink + "v_search.php"; } }
private string ReleaseUrl { get { return SiteLink + "v_search.php"; } }
internal class TrackerUrlDetails
@ -77,7 +81,7 @@ namespace Jackett.Common.Indexers
}
}
new ConfigurationDataCaptchaLogin configData
private new ConfigurationDataCaptchaLogin configData
{
get { return (ConfigurationDataCaptchaLogin)base.configData; }
set { base.configData = value; }
@ -104,7 +108,7 @@ namespace Jackett.Common.Indexers
// looks like after some failed login attempts there's a captcha
var loginPage = await RequestStringWithCookies(LoginUrl, string.Empty);
CQ dom = loginPage.Content;
CQ qCaptchaImg = dom.Find("img#captcha_pictcha").First();
var qCaptchaImg = dom.Find("img#captcha_pictcha").First();
if (qCaptchaImg.Length == 1)
{
var CaptchaUrl = SiteLink + qCaptchaImg.Attr("src");
@ -161,7 +165,7 @@ namespace Jackett.Common.Indexers
return IndexerConfigurationStatus.RequiresTesting;
}
private async Task<Boolean> Logout()
private async Task<bool> Logout()
{
logger.Info("Performing logout");

View File

@ -131,8 +131,8 @@ namespace Jackett.Common.Indexers.Meta
public IEnumerable<IIndexer> Indexers;
private Func<IIndexer, bool> filterFunc;
private IFallbackStrategyProvider fallbackStrategyProvider;
private IResultFilterProvider resultFilterProvider;
private readonly Func<IIndexer, bool> filterFunc;
private readonly IFallbackStrategyProvider fallbackStrategyProvider;
private readonly IResultFilterProvider resultFilterProvider;
}
}

View File

@ -38,7 +38,7 @@ namespace Jackett.Common.Indexers.Meta
public ImdbFallbackStrategy(IImdbResolver resolver, TorznabQuery query)
{
this.resolver = resolver;
this.titles = null;
titles = null;
this.query = query;
}
@ -49,9 +49,9 @@ namespace Jackett.Common.Indexers.Meta
return titles.Select(t => query.CreateFallback(t));
}
private IImdbResolver resolver;
private readonly IImdbResolver resolver;
private IEnumerable<string> titles;
private TorznabQuery query;
private readonly TorznabQuery query;
}
public class ImdbFallbackStrategyProvider : IFallbackStrategyProvider
@ -71,6 +71,6 @@ namespace Jackett.Common.Indexers.Meta
return result;
}
private IImdbResolver resolver;
private readonly IImdbResolver resolver;
}
}

View File

@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
@ -31,8 +30,8 @@ namespace Jackett.Common.Indexers.Meta
long? imdbId = null;
try
{
var normalizedImdbId = String.Concat(query.ImdbID.Where(c => char.IsDigit(c)));
imdbId = Int64.Parse(normalizedImdbId);
var normalizedImdbId = string.Concat(query.ImdbID.Where(c => char.IsDigit(c)));
imdbId = long.Parse(normalizedImdbId);
}
catch
{
@ -89,8 +88,8 @@ namespace Jackett.Common.Indexers.Meta
return result;
}
private IImdbResolver resolver;
private TorznabQuery query;
private readonly IImdbResolver resolver;
private readonly TorznabQuery query;
}
public class NoFilter : IResultFilter
@ -126,6 +125,6 @@ namespace Jackett.Common.Indexers.Meta
return filter.ToEnumerable();
}
private IImdbResolver resolver;
private readonly IImdbResolver resolver;
}
}

View File

@ -168,7 +168,7 @@ namespace Jackett.Common.Indexers
{
var releases = new List<ReleaseInfo>();
NameValueCollection qParams = new NameValueCollection();
var qParams = new NameValueCollection();
qParams.Add("tor[text]", query.GetQueryString());
qParams.Add("tor[srchIn][title]", "true");
qParams.Add("tor[srchIn][author]", "true");
@ -184,11 +184,11 @@ namespace Jackett.Common.Indexers
qParams.Add("description", "1"); // include the description
//qParams.Add("bookmarks", "0"); // include if the item is bookmarked or not
List<string> catList = MapTorznabCapsToTrackers(query);
var catList = MapTorznabCapsToTrackers(query);
if (catList.Any())
{
int index = 0;
foreach (string cat in catList)
var index = 0;
foreach (var cat in catList)
{
qParams.Add("tor[cat][" + index + "]", cat);
index++;
@ -199,7 +199,7 @@ namespace Jackett.Common.Indexers
qParams.Add("tor[cat][]", "0");
}
string urlSearch = SearchUrl;
var urlSearch = SearchUrl;
if (qParams.Count > 0)
{
urlSearch += $"?{qParams.GetQueryString()}";

View File

@ -20,7 +20,7 @@ namespace Jackett.Common.Indexers
{
private string LoginUrl { get { return SiteLink + "login.php"; } }
private string SearchUrl { get { return SiteLink + "torrents.php"; } }
private string[] LanguageCats = new string[] { "xvidser", "dvdser", "hdser", "xvid", "dvd", "dvd9", "hd", "mp3", "lossless", "ebook" };
private readonly string[] LanguageCats = new string[] { "xvidser", "dvdser", "hdser", "xvid", "dvd", "dvd9", "hd", "mp3", "lossless", "ebook" };
private new ConfigurationDataNCore configData
{
@ -117,7 +117,7 @@ namespace Jackett.Common.Indexers
return IndexerConfigurationStatus.RequiresTesting;
}
List<ReleaseInfo> parseTorrents(WebClientStringResult results, String seasonep, TorznabQuery query, int already_founded, int limit, int previously_parsed_on_page)
private List<ReleaseInfo> parseTorrents(WebClientStringResult results, string seasonep, TorznabQuery query, int already_founded, int limit, int previously_parsed_on_page)
{
var releases = new List<ReleaseInfo>();
try
@ -128,11 +128,11 @@ namespace Jackett.Common.Indexers
var rows = dom[".box_torrent_all"].Find(".box_torrent");
// Check torrents only till we reach the query Limit
for (int i = previously_parsed_on_page; (i < rows.Length && ((already_founded + releases.Count) < limit)); i++)
for (var i = previously_parsed_on_page; (i < rows.Length && ((already_founded + releases.Count) < limit)); i++)
{
try
{
CQ qRow = rows[i].Cq();
var qRow = rows[i].Cq();
var key = dom["link[rel=alternate]"].First().Attr("href").Split('=').Last();
release = new ReleaseInfo();
@ -146,8 +146,8 @@ namespace Jackett.Common.Indexers
release.DownloadVolumeFactor = 0;
release.UploadVolumeFactor = 1;
string downloadLink = SiteLink + torrentTxt.GetAttribute("href");
string downloadId = downloadLink.Substring(downloadLink.IndexOf("&id=") + 4);
var downloadLink = SiteLink + torrentTxt.GetAttribute("href");
var downloadId = downloadLink.Substring(downloadLink.IndexOf("&id=") + 4);
release.Link = new Uri(SiteLink.ToString() + "torrents.php?action=download&id=" + downloadId + "&key=" + key);
release.Comments = new Uri(SiteLink.ToString() + "torrents.php?action=details&id=" + downloadId);
@ -160,16 +160,16 @@ namespace Jackett.Common.Indexers
var banner = qRow.Find("img.infobar_ico").Attr("onmouseover");
if (banner != null)
{
Regex BannerRegEx = new Regex(@"mutat\('(.*?)', '", RegexOptions.Compiled);
var BannerRegEx = new Regex(@"mutat\('(.*?)', '", RegexOptions.Compiled);
var BannerMatch = BannerRegEx.Match(banner);
var bannerurl = BannerMatch.Groups[1].Value;
release.BannerUrl = new Uri(bannerurl);
}
release.PublishDate = DateTime.Parse(qRow.Find(".box_feltoltve2").Get(0).InnerHTML.Replace("<br />", " "), CultureInfo.InvariantCulture);
string[] sizeSplit = qRow.Find(".box_meret2").Get(0).InnerText.Split(' ');
var sizeSplit = qRow.Find(".box_meret2").Get(0).InnerText.Split(' ');
release.Size = ReleaseInfo.GetBytes(sizeSplit[1].ToLower(), ParseUtil.CoerceFloat(sizeSplit[0]));
string catlink = qRow.Find("a:has(img[class='categ_link'])").First().Attr("href");
string cat = ParseUtil.GetArgumentFromQueryString(catlink, "tipus");
var catlink = qRow.Find("a:has(img[class='categ_link'])").First().Attr("href");
var cat = ParseUtil.GetArgumentFromQueryString(catlink, "tipus");
release.Category = MapTrackerCatToNewznab(cat);
/* if the release name not contains the language we add it because it is know from category */
@ -187,14 +187,14 @@ namespace Jackett.Common.Indexers
var temp = release.Title;
// releasedata everithing after Name.S0Xe0X
String releasedata = release.Title.Split(new[] { seasonep }, StringSplitOptions.None)[1].Trim();
var releasedata = release.Title.Split(new[] { seasonep }, StringSplitOptions.None)[1].Trim();
/* if the release name not contains the language we add it because it is know from category */
if (cat.Contains("hun") && !releasedata.Contains("hun"))
releasedata += ".hun";
// release description contains [imdb: ****] but we only need the data before it for title
String[] description = { release.Description, "" };
string[] description = { release.Description, "" };
if (release.Description.Contains("[imdb:"))
{
description = release.Description.Split('[');
@ -204,7 +204,7 @@ namespace Jackett.Common.Indexers
release.Title = (description[0].Trim() + "." + seasonep.Trim() + "." + releasedata.Trim('.')).Replace(' ', '.');
// if search is done for S0X than we dont want to put . between S0X and E0X
Match match = Regex.Match(releasedata, @"^E\d\d?");
var match = Regex.Match(releasedata, @"^E\d\d?");
if (seasonep.Length == 3 && match.Success)
release.Title = (description[0].Trim() + "." + seasonep.Trim() + releasedata.Trim('.')).Replace(' ', '.');
@ -230,7 +230,7 @@ namespace Jackett.Common.Indexers
return releases;
}
protected async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query, String seasonep)
protected async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query, string seasonep)
{
var releases = new List<ReleaseInfo>();
var searchString = query.GetQueryString();
@ -271,14 +271,14 @@ namespace Jackett.Common.Indexers
CQ dom = results.Content;
int numVal = 0;
var numVal = 0;
// find number of torrents / page
int torrent_per_page = dom[".box_torrent_all"].Find(".box_torrent").Length;
var torrent_per_page = dom[".box_torrent_all"].Find(".box_torrent").Length;
if (torrent_per_page == 0)
return releases;
int start_page = (query.Offset / torrent_per_page) + 1;
int previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
var start_page = (query.Offset / torrent_per_page) + 1;
var previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
if (previously_parsed_on_page < 0)
previously_parsed_on_page = query.Offset;
@ -287,13 +287,13 @@ namespace Jackett.Common.Indexers
if (pagelinks.Length > 0)
{
// If there are several pages find the link for the latest one
for (int i = pagelinks.Length - 1; i > 0; i--)
for (var i = pagelinks.Length - 1; i > 0; i--)
{
var last_page_link = (pagelinks[i].Cq().Attr("href")).Trim();
if (last_page_link.Contains("oldal"))
{
Match match = Regex.Match(last_page_link, @"(?<=[\?,&]oldal=)(\d+)(?=&)");
numVal = Int32.Parse(match.Value);
var match = Regex.Match(last_page_link, @"(?<=[\?,&]oldal=)(\d+)(?=&)");
numVal = int.Parse(match.Value);
break;
}
}
@ -313,7 +313,7 @@ namespace Jackett.Common.Indexers
// Check all the pages for the torrents.
// The starting index is 2. (the first one is the original where we parse out the pages.)
for (int i = start_page; (i <= numVal && releases.Count < limit); i++)
for (var i = start_page; (i <= numVal && releases.Count < limit); i++)
{
pairs.Add(new KeyValuePair<string, string>("oldal", i.ToString()));
results = await PostDataWithCookiesAndRetry(SearchUrl, pairs);

View File

@ -118,12 +118,12 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var releases = new List<ReleaseInfo>();
@ -141,7 +141,7 @@ namespace Jackett.Common.Indexers
}
var cats = MapTorznabCapsToTrackers(query);
string cat = "0";
var cat = "0";
if (cats.Count == 1)
{
cat = cats[0];
@ -205,7 +205,7 @@ namespace Jackett.Common.Indexers
var dateStr = qDateStr.Text().Replace('\xA0', ' ');
var dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "dd.MM.yyyy HH:mm:ss", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
DateTime pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
var pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
release.PublishDate = pubDateUtc;
var files = qRow.Find("td:contains(Datei) > strong ~ strong").Text();

View File

@ -19,13 +19,13 @@ namespace Jackett.Common.Indexers
{
public class Newpct : BaseCachingWebIndexer
{
enum ReleaseType
private enum ReleaseType
{
TV,
Movie,
}
class NewpctRelease : ReleaseInfo
private class NewpctRelease : ReleaseInfo
{
public ReleaseType NewpctReleaseType;
public string SeriesName;
@ -55,21 +55,21 @@ namespace Jackett.Common.Indexers
}
}
class DownloadMatcher
private class DownloadMatcher
{
public Regex MatchRegex;
public MatchEvaluator MatchEvaluator;
}
private static Uri DefaultSiteLinkUri =
private static readonly Uri DefaultSiteLinkUri =
new Uri("https://descargas2020.org");
private static Uri[] ExtraSiteLinkUris = new Uri[]
private static readonly Uri[] ExtraSiteLinkUris = new Uri[]
{
new Uri("https://pctnew.org"),
};
private static Uri[] LegacySiteLinkUris = new Uri[]
private static readonly Uri[] LegacySiteLinkUris = new Uri[]
{
new Uri("http://descargas2020.com/"),
new Uri("http://www.tvsinpagar.com/"),
@ -82,13 +82,13 @@ namespace Jackett.Common.Indexers
};
private NewpctRelease _mostRecentRelease;
private char[] _wordSeparators = new char[] { ' ', '.', ',', ';', '(', ')', '[', ']', '-', '_' };
private int _wordNotFoundScore = 100000;
private Regex _searchStringRegex = new Regex(@"(.+?)S0?(\d+)(E0?(\d+))?$", RegexOptions.IgnoreCase);
private Regex _titleListRegex = new Regex(@"Serie( *Descargar)?(.+?)(Temporada(.+?)(\d+)(.+?))?Capitulos?(.+?)(\d+)((.+?)(\d+))?(.+?)-(.+?)Calidad(.*)", RegexOptions.IgnoreCase);
private Regex _titleClassicRegex = new Regex(@"(\[[^\]]*\])?\[Cap\.(\d{1,2})(\d{2})([_-](\d{1,2})(\d{2}))?\]", RegexOptions.IgnoreCase);
private Regex _titleClassicTvQualityRegex = new Regex(@"\[([^\]]*HDTV[^\]]*)", RegexOptions.IgnoreCase);
private DownloadMatcher[] _downloadMatchers = new DownloadMatcher[]
private readonly char[] _wordSeparators = new char[] { ' ', '.', ',', ';', '(', ')', '[', ']', '-', '_' };
private readonly int _wordNotFoundScore = 100000;
private readonly Regex _searchStringRegex = new Regex(@"(.+?)S0?(\d+)(E0?(\d+))?$", RegexOptions.IgnoreCase);
private readonly Regex _titleListRegex = new Regex(@"Serie( *Descargar)?(.+?)(Temporada(.+?)(\d+)(.+?))?Capitulos?(.+?)(\d+)((.+?)(\d+))?(.+?)-(.+?)Calidad(.*)", RegexOptions.IgnoreCase);
private readonly Regex _titleClassicRegex = new Regex(@"(\[[^\]]*\])?\[Cap\.(\d{1,2})(\d{2})([_-](\d{1,2})(\d{2}))?\]", RegexOptions.IgnoreCase);
private readonly Regex _titleClassicTvQualityRegex = new Regex(@"\[([^\]]*HDTV[^\]]*)", RegexOptions.IgnoreCase);
private readonly DownloadMatcher[] _downloadMatchers = new DownloadMatcher[]
{
new DownloadMatcher()
{
@ -101,11 +101,11 @@ namespace Jackett.Common.Indexers
},
};
private int _maxDailyPages = 7;
private int _maxMoviesPages = 30;
private int _maxEpisodesListPages = 100;
private int[] _allTvCategories = (new TorznabCategory[] { TorznabCatType.TV }).Concat(TorznabCatType.TV.SubCategories).Select(c => c.ID).ToArray();
private int[] _allMoviesCategories = (new TorznabCategory[] { TorznabCatType.Movies }).Concat(TorznabCatType.Movies.SubCategories).Select(c => c.ID).ToArray();
private readonly int _maxDailyPages = 7;
private readonly int _maxMoviesPages = 30;
private readonly int _maxEpisodesListPages = 100;
private readonly int[] _allTvCategories = (new TorznabCategory[] { TorznabCatType.TV }).Concat(TorznabCatType.TV.SubCategories).Select(c => c.ID).ToArray();
private readonly int[] _allMoviesCategories = (new TorznabCategory[] { TorznabCatType.Movies }).Concat(TorznabCatType.Movies.SubCategories).Select(c => c.ID).ToArray();
private bool _includeVo;
private bool _filterMovies;
@ -113,13 +113,13 @@ namespace Jackett.Common.Indexers
private DateTime _dailyNow;
private int _dailyResultIdx;
private string _searchUrl = "/buscar";
private string _searchJsonUrl = "/get/result/";
private string _dailyUrl = "/ultimas-descargas/pg/{0}";
private string[] _seriesLetterUrls = new string[] { "/series/letter/{0}", "/series-hd/letter/{0}" };
private string[] _seriesVOLetterUrls = new string[] { "/series-vo/letter/{0}" };
private string _seriesUrl = "{0}/pg/{1}";
private string[] _voUrls = new string[] { "serie-vo", "serievo" };
private readonly string _searchUrl = "/buscar";
private readonly string _searchJsonUrl = "/get/result/";
private readonly string _dailyUrl = "/ultimas-descargas/pg/{0}";
private readonly string[] _seriesLetterUrls = new string[] { "/series/letter/{0}", "/series-hd/letter/{0}" };
private readonly string[] _seriesVOLetterUrls = new string[] { "/series-vo/letter/{0}" };
private readonly string _seriesUrl = "{0}/pg/{1}";
private readonly string[] _voUrls = new string[] { "serie-vo", "serievo" };
public override string[] LegacySiteLinks { get; protected set; } = LegacySiteLinkUris.Select(u => u.AbsoluteUri).ToArray();
@ -166,7 +166,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
Uri link = new Uri(configData.SiteLink.Value);
var link = new Uri(configData.SiteLink.Value);
lock (cache)
{
@ -178,9 +178,9 @@ namespace Jackett.Common.Indexers
public override async Task<byte[]> Download(Uri linkParam)
{
IEnumerable<Uri> uris = GetLinkUris(linkParam);
var uris = GetLinkUris(linkParam);
foreach (Uri uri in uris)
foreach (var uri in uris)
{
byte[] result = null;
@ -192,7 +192,7 @@ namespace Jackett.Common.Indexers
if (content != null)
{
Uri uriLink = ExtractDownloadUri(content, uri.AbsoluteUri);
var uriLink = ExtractDownloadUri(content, uri.AbsoluteUri);
if (uriLink != null)
result = await base.Download(uriLink);
}
@ -204,7 +204,7 @@ namespace Jackett.Common.Indexers
if (result != null)
return result;
else
this.logger.Warn("Newpct - download link not found in " + uri.LocalPath);
logger.Warn("Newpct - download link not found in " + uri.LocalPath);
}
return null;
@ -212,9 +212,9 @@ namespace Jackett.Common.Indexers
private Uri ExtractDownloadUri(string content, string baseLink)
{
foreach (DownloadMatcher matcher in _downloadMatchers)
foreach (var matcher in _downloadMatchers)
{
Match match = matcher.MatchRegex.Match(content);
var match = matcher.MatchRegex.Match(content);
if (match.Success)
{
string linkText;
@ -231,9 +231,9 @@ namespace Jackett.Common.Indexers
return null;
}
IEnumerable<Uri> GetLinkUris(Uri referenceLink)
private IEnumerable<Uri> GetLinkUris(Uri referenceLink)
{
List<Uri> uris = new List<Uri>();
var uris = new List<Uri>();
uris.Add(referenceLink);
if (DefaultSiteLinkUri.Scheme != referenceLink.Scheme && DefaultSiteLinkUri.Host != referenceLink.Host)
uris.Add(DefaultSiteLinkUri);
@ -243,11 +243,11 @@ namespace Jackett.Common.Indexers
(u.Scheme != referenceLink.Scheme || u.Host != referenceLink.Host) &&
(u.Scheme != DefaultSiteLinkUri.Scheme || u.Host != DefaultSiteLinkUri.Host))).ToList();
List<Uri> result = new List<Uri>();
var result = new List<Uri>();
foreach (Uri uri in uris)
foreach (var uri in uris)
{
UriBuilder ub = new UriBuilder(uri);
var ub = new UriBuilder(uri);
ub.Path = referenceLink.LocalPath;
result.Add(ub.Uri);
}
@ -264,11 +264,11 @@ namespace Jackett.Common.Indexers
_removeMovieAccents = ((BoolItem)configData.GetDynamic("RemoveMovieAccents")).Value;
_dailyNow = DateTime.Now;
_dailyResultIdx = 0;
bool rssMode = string.IsNullOrEmpty(query.SanitizedSearchTerm);
var rssMode = string.IsNullOrEmpty(query.SanitizedSearchTerm);
if (rssMode)
{
int pg = 1;
var pg = 1;
Uri validUri = null;
while (pg <= _maxDailyPages)
{
@ -277,7 +277,7 @@ namespace Jackett.Common.Indexers
if (validUri != null)
{
Uri uri = new Uri(validUri, string.Format(_dailyUrl, pg));
var uri = new Uri(validUri, string.Format(_dailyUrl, pg));
results = await RequestStringWithCookiesAndRetry(uri.AbsoluteUri);
if (results == null || string.IsNullOrEmpty(results.Content))
break;
@ -286,7 +286,7 @@ namespace Jackett.Common.Indexers
}
else
{
foreach (Uri uri in GetLinkUris(new Uri(siteLink, string.Format(_dailyUrl, pg))))
foreach (var uri in GetLinkUris(new Uri(siteLink, string.Format(_dailyUrl, pg))))
{
results = await RequestStringWithCookiesAndRetry(uri.AbsoluteUri);
if (results != null && !string.IsNullOrEmpty(results.Content))
@ -308,7 +308,7 @@ namespace Jackett.Common.Indexers
releases.AddRange(items);
//Check if we need to go to next page
bool recentFound = _mostRecentRelease != null &&
var recentFound = _mostRecentRelease != null &&
items.Any(r => r.Title == _mostRecentRelease.Title && r.Link.AbsoluteUri == _mostRecentRelease.Link.AbsoluteUri);
if (pg == 1)
_mostRecentRelease = (NewpctRelease)items.First().Clone();
@ -320,14 +320,14 @@ namespace Jackett.Common.Indexers
}
else
{
bool isTvSearch = query.Categories == null || query.Categories.Length == 0 ||
var isTvSearch = query.Categories == null || query.Categories.Length == 0 ||
query.Categories.Any(c => _allTvCategories.Contains(c));
if (isTvSearch)
{
releases.AddRange(await TvSearch(siteLink, query));
}
bool isMovieSearch = query.Categories == null || query.Categories.Length == 0 ||
var isMovieSearch = query.Categories == null || query.Categories.Length == 0 ||
query.Categories.Any(c => _allMoviesCategories.Contains(c));
if (isMovieSearch)
{
@ -346,16 +346,16 @@ namespace Jackett.Common.Indexers
{
List<ReleaseInfo> newpctReleases = null;
string seriesName = query.SanitizedSearchTerm;
int? season = query.Season > 0 ? (int?)query.Season : null;
var seriesName = query.SanitizedSearchTerm;
var season = query.Season > 0 ? (int?)query.Season : null;
int? episode = null;
if (!string.IsNullOrWhiteSpace(query.Episode) && int.TryParse(query.Episode, out int episodeTemp))
if (!string.IsNullOrWhiteSpace(query.Episode) && int.TryParse(query.Episode, out var episodeTemp))
episode = episodeTemp;
//If query has no season/episode info, try to parse title
if (season == null && episode == null)
{
Match searchMatch = _searchStringRegex.Match(query.SanitizedSearchTerm);
var searchMatch = _searchStringRegex.Match(query.SanitizedSearchTerm);
if (searchMatch.Success)
{
seriesName = searchMatch.Groups[1].Value.Trim();
@ -377,7 +377,7 @@ namespace Jackett.Common.Indexers
newpctReleases = new List<ReleaseInfo>();
//Search series url
foreach (Uri seriesListUrl in SeriesListUris(siteLink, seriesName))
foreach (var seriesListUrl in SeriesListUris(siteLink, seriesName))
{
newpctReleases.AddRange(await GetReleasesFromUri(seriesListUrl, seriesName));
}
@ -386,7 +386,7 @@ namespace Jackett.Common.Indexers
if (newpctReleases.Count == 0 && !(seriesName.ToLower().StartsWith("the")))
{
seriesName = "The " + seriesName;
foreach (Uri seriesListUrl in SeriesListUris(siteLink, seriesName))
foreach (var seriesListUrl in SeriesListUris(siteLink, seriesName))
{
newpctReleases.AddRange(await GetReleasesFromUri(seriesListUrl, seriesName));
}
@ -405,7 +405,7 @@ namespace Jackett.Common.Indexers
//Filter only episodes needed
return newpctReleases.Where(r =>
{
NewpctRelease nr = r as NewpctRelease;
var nr = r as NewpctRelease;
return (
nr.Season.HasValue != season.HasValue || //Can't determine if same season
nr.Season.HasValue && season.Value == nr.Season.Value && //Same season and ...
@ -428,13 +428,13 @@ namespace Jackett.Common.Indexers
await FollowIfRedirect(results);
//Episodes list
string seriesEpisodesUrl = ParseSeriesListContent(results.Content, seriesName);
var seriesEpisodesUrl = ParseSeriesListContent(results.Content, seriesName);
if (!string.IsNullOrEmpty(seriesEpisodesUrl))
{
int pg = 1;
var pg = 1;
while (pg < _maxEpisodesListPages)
{
Uri episodesListUrl = new Uri(string.Format(_seriesUrl, seriesEpisodesUrl, pg));
var episodesListUrl = new Uri(string.Format(_seriesUrl, seriesEpisodesUrl, pg));
results = await RequestStringWithCookiesAndRetry(episodesListUrl.AbsoluteUri);
await FollowIfRedirect(results);
@ -461,7 +461,7 @@ namespace Jackett.Common.Indexers
{
lettersUrl = _seriesLetterUrls.Concat(_seriesVOLetterUrls);
}
string seriesLetter = !char.IsDigit(seriesName[0]) ? seriesName[0].ToString() : "0-9";
var seriesLetter = !char.IsDigit(seriesName[0]) ? seriesName[0].ToString() : "0-9";
return lettersUrl.Select(urlFormat =>
{
return new Uri(siteLink, string.Format(urlFormat, seriesLetter.ToLower()));
@ -473,7 +473,7 @@ namespace Jackett.Common.Indexers
var SearchResultParser = new HtmlParser();
var doc = SearchResultParser.ParseDocument(content);
List<NewpctRelease> releases = new List<NewpctRelease>();
var releases = new List<NewpctRelease>();
try
{
@ -492,7 +492,7 @@ namespace Jackett.Common.Indexers
var span = row.QuerySelector("span");
var quality = span.ChildNodes[0].TextContent.Trim();
ReleaseType releaseType = ReleaseTypeFromQuality(quality);
var releaseType = ReleaseTypeFromQuality(quality);
var sizeText = span.ChildNodes[1].TextContent.Replace("Tama\u00F1o", "").Trim();
var div = row.QuerySelector("div");
@ -525,7 +525,7 @@ namespace Jackett.Common.Indexers
var SearchResultParser = new HtmlParser();
var doc = SearchResultParser.ParseDocument(content);
Dictionary<string, string> results = new Dictionary<string, string>();
var results = new Dictionary<string, string>();
try
{
@ -550,7 +550,7 @@ namespace Jackett.Common.Indexers
var SearchResultParser = new HtmlParser();
var doc = SearchResultParser.ParseDocument(content);
List<NewpctRelease> releases = new List<NewpctRelease>();
var releases = new List<NewpctRelease>();
try
{
@ -565,9 +565,9 @@ namespace Jackett.Common.Indexers
var pubDateText = row.ChildNodes[3].TextContent.Trim();
var sizeText = row.ChildNodes[5].TextContent.Trim();
long size = ReleaseInfo.GetBytes(sizeText);
DateTime publishDate = DateTime.ParseExact(pubDateText, "dd-MM-yyyy", null);
NewpctRelease newpctRelease = GetReleaseFromData(ReleaseType.TV, title, detailsUrl, null, null, size, publishDate);
var size = ReleaseInfo.GetBytes(sizeText);
var publishDate = DateTime.ParseExact(pubDateText, "dd-MM-yyyy", null);
var newpctRelease = GetReleaseFromData(ReleaseType.TV, title, detailsUrl, null, null, size, publishDate);
releases.Add(newpctRelease);
}
@ -584,13 +584,13 @@ namespace Jackett.Common.Indexers
{
var releases = new List<NewpctRelease>();
string searchStr = query.SanitizedSearchTerm;
var searchStr = query.SanitizedSearchTerm;
if (_removeMovieAccents)
searchStr = RemoveDiacritics(searchStr);
Uri validUri = null;
bool validUriUsesJson = false;
int pg = 1;
var validUriUsesJson = false;
var pg = 1;
while (pg <= _maxMoviesPages)
{
var queryCollection = new Dictionary<string, string>();
@ -605,7 +605,7 @@ namespace Jackett.Common.Indexers
{
if (validUriUsesJson)
{
Uri uri = new Uri(validUri, _searchJsonUrl);
var uri = new Uri(validUri, _searchJsonUrl);
results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection);
if (results == null || string.IsNullOrEmpty(results.Content))
break;
@ -613,7 +613,7 @@ namespace Jackett.Common.Indexers
}
else
{
Uri uri = new Uri(validUri, _searchUrl);
var uri = new Uri(validUri, _searchUrl);
results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection);
if (results == null || string.IsNullOrEmpty(results.Content))
break;
@ -626,12 +626,12 @@ namespace Jackett.Common.Indexers
{
using (var uris = GetLinkUris(new Uri(siteLink, _searchUrl)).GetEnumerator())
{
bool resultFound = false;
var resultFound = false;
while (jsonUris.MoveNext() && uris.MoveNext() && !resultFound)
{
for (int i = 0; i < 2 && !resultFound; i++)
for (var i = 0; i < 2 && !resultFound; i++)
{
bool usingJson = i == 0;
var usingJson = i == 0;
Uri uri;
if (usingJson)
@ -685,11 +685,11 @@ namespace Jackett.Common.Indexers
private IEnumerable<NewpctRelease> ParseSearchContent(string content)
{
bool someFound = false;
var someFound = false;
var SearchResultParser = new HtmlParser();
var doc = SearchResultParser.ParseDocument(content);
List<NewpctRelease> releases = new List<NewpctRelease>();
var releases = new List<NewpctRelease>();
try
{
@ -705,8 +705,8 @@ namespace Jackett.Common.Indexers
someFound = true;
bool isSeries = h2.QuerySelector("span") != null && h2.TextContent.ToLower().Contains("calidad");
bool isGame = title.ToLower().Contains("pcdvd");
var isSeries = h2.QuerySelector("span") != null && h2.TextContent.ToLower().Contains("calidad");
var isGame = title.ToLower().Contains("pcdvd");
if (isSeries || isGame)
continue;
@ -723,8 +723,7 @@ namespace Jackett.Common.Indexers
catch
{
}
DateTime publishDate;
DateTime.TryParseExact(pubDateText, "dd-MM-yyyy", null, DateTimeStyles.None, out publishDate);
DateTime.TryParseExact(pubDateText, "dd-MM-yyyy", null, DateTimeStyles.None, out var publishDate);
var div = row.QuerySelector("div");
@ -747,12 +746,12 @@ namespace Jackett.Common.Indexers
private IEnumerable<NewpctRelease> ParseSearchJsonContent(Uri uri, string content)
{
bool someFound = false;
var someFound = false;
List<NewpctRelease> releases = new List<NewpctRelease>();
var releases = new List<NewpctRelease>();
//Remove path from uri
UriBuilder ub = new UriBuilder(uri);
var ub = new UriBuilder(uri);
ub.Path = string.Empty;
uri = ub.Uri;
@ -760,21 +759,21 @@ namespace Jackett.Common.Indexers
{
var jo = JObject.Parse(content);
int numItems = int.Parse(jo["data"]["items"].ToString());
for (int i = 0; i < numItems; i++)
var numItems = int.Parse(jo["data"]["items"].ToString());
for (var i = 0; i < numItems; i++)
{
var item = jo["data"]["torrents"]["0"][i.ToString()];
string url = item["guid"].ToString();
string title = item["torrentName"].ToString();
string pubDateText = item["torrentDateAdded"].ToString();
string calidad = item["calidad"].ToString();
string sizeText = item["torrentSize"].ToString();
var url = item["guid"].ToString();
var title = item["torrentName"].ToString();
var pubDateText = item["torrentDateAdded"].ToString();
var calidad = item["calidad"].ToString();
var sizeText = item["torrentSize"].ToString();
someFound = true;
bool isSeries = calidad != null && calidad.ToLower().Contains("hdtv");
bool isGame = title.ToLower().Contains("pcdvd");
var isSeries = calidad != null && calidad.ToLower().Contains("hdtv");
var isGame = title.ToLower().Contains("pcdvd");
if (isSeries || isGame)
continue;
@ -786,11 +785,10 @@ namespace Jackett.Common.Indexers
catch
{
}
DateTime publishDate;
DateTime.TryParseExact(pubDateText, "dd/MM/yyyy", null, DateTimeStyles.None, out publishDate);
DateTime.TryParseExact(pubDateText, "dd/MM/yyyy", null, DateTimeStyles.None, out var publishDate);
NewpctRelease newpctRelease;
string detailsUrl = new Uri(uri, url).AbsoluteUri;
var detailsUrl = new Uri(uri, url).AbsoluteUri;
newpctRelease = GetReleaseFromData(ReleaseType.Movie, title, detailsUrl, calidad, null, size, publishDate);
releases.Add(newpctRelease);
@ -812,20 +810,20 @@ namespace Jackett.Common.Indexers
private void ScoreReleases(IEnumerable<NewpctRelease> releases, string searchTerm)
{
string[] searchWords = searchTerm.ToLower().Split(_wordSeparators, StringSplitOptions.None).
var searchWords = searchTerm.ToLower().Split(_wordSeparators, StringSplitOptions.None).
Select(s => s.Trim()).
Where(s => !string.IsNullOrEmpty(s)).ToArray();
foreach (NewpctRelease release in releases)
foreach (var release in releases)
{
release.Score = 0;
string[] releaseWords = release.Title.ToLower().Split(_wordSeparators, StringSplitOptions.None).
var releaseWords = release.Title.ToLower().Split(_wordSeparators, StringSplitOptions.None).
Select(s => s.Trim()).
Where(s => !string.IsNullOrEmpty(s)).ToArray();
foreach (string search in searchWords)
foreach (var search in searchWords)
{
int index = Array.IndexOf(releaseWords, search);
var index = Array.IndexOf(releaseWords, search);
if (index >= 0)
{
release.Score += index;
@ -839,7 +837,7 @@ namespace Jackett.Common.Indexers
}
}
ReleaseType ReleaseTypeFromQuality(string quality)
private ReleaseType ReleaseTypeFromQuality(string quality)
{
if (quality.Trim().ToLower().StartsWith("hdtv"))
return ReleaseType.TV;
@ -847,36 +845,36 @@ namespace Jackett.Common.Indexers
return ReleaseType.Movie;
}
NewpctRelease GetReleaseFromData(ReleaseType releaseType, string title, string detailsUrl, string quality, string language, long size, DateTime publishDate)
private NewpctRelease GetReleaseFromData(ReleaseType releaseType, string title, string detailsUrl, string quality, string language, long size, DateTime publishDate)
{
NewpctRelease result = new NewpctRelease();
var result = new NewpctRelease();
result.NewpctReleaseType = releaseType;
//Sanitize
title = title.Replace("\t", "").Replace("\x2013", "-");
Match match = _titleListRegex.Match(title);
var match = _titleListRegex.Match(title);
if (match.Success)
{
result.SeriesName = match.Groups[2].Value.Trim(' ', '-');
result.Season = int.Parse(match.Groups[5].Success ? match.Groups[5].Value.Trim() : "1");
result.Episode = int.Parse(match.Groups[8].Value.Trim().PadLeft(2, '0'));
result.EpisodeTo = match.Groups[11].Success ? (int?)int.Parse(match.Groups[11].Value.Trim()) : null;
string audioQuality = match.Groups[13].Value.Trim(' ', '[', ']');
var audioQuality = match.Groups[13].Value.Trim(' ', '[', ']');
if (string.IsNullOrEmpty(language))
language = audioQuality;
quality = match.Groups[14].Value.Trim(' ', '[', ']');
string seasonText = result.Season.ToString();
string episodeText = seasonText + result.Episode.ToString().PadLeft(2, '0');
string episodeToText = result.EpisodeTo.HasValue ? "_" + seasonText + result.EpisodeTo.ToString().PadLeft(2, '0') : "";
var seasonText = result.Season.ToString();
var episodeText = seasonText + result.Episode.ToString().PadLeft(2, '0');
var episodeToText = result.EpisodeTo.HasValue ? "_" + seasonText + result.EpisodeTo.ToString().PadLeft(2, '0') : "";
result.Title = string.Format("{0} - Temporada {1} [{2}][Cap.{3}{4}][{5}]",
result.SeriesName, seasonText, quality, episodeText, episodeToText, audioQuality);
}
else
{
Match matchClassic = _titleClassicRegex.Match(title);
var matchClassic = _titleClassicRegex.Match(title);
if (matchClassic.Success)
{
result.Season = matchClassic.Groups[2].Success ? (int?)int.Parse(matchClassic.Groups[2].Value) : null;
@ -922,7 +920,7 @@ namespace Jackett.Common.Indexers
private string FixedTitle(NewpctRelease release, string quality, string language)
{
if (String.IsNullOrEmpty(release.SeriesName))
if (string.IsNullOrEmpty(release.SeriesName))
{
release.SeriesName = release.Title;
if (release.NewpctReleaseType == ReleaseType.TV && release.SeriesName.Contains("-"))
@ -935,7 +933,7 @@ namespace Jackett.Common.Indexers
if (release.NewpctReleaseType == ReleaseType.TV)
{
if (String.IsNullOrEmpty(quality))
if (string.IsNullOrEmpty(quality))
quality = "HDTV";
var seasonAndEpisode = "S" + release.Season.ToString().PadLeft(2, '0');
@ -965,7 +963,7 @@ namespace Jackett.Common.Indexers
titleParts.Add("Spanish");
}
string result = String.Join(".", titleParts);
var result = string.Join(".", titleParts);
result = Regex.Replace(result, @"[\[\]]+", ".");
result = Regex.Replace(result, @"\.[ \.]*\.", ".");

View File

@ -342,7 +342,7 @@ namespace Jackett.Common.Indexers
Output("Leechers: " + leechers);
// Completed
Regex regexObj = new Regex(@"[^\d]");
var regexObj = new Regex(@"[^\d]");
var completed2 = tRow.Find("td:eq(7)").Text();
var completed = ParseUtil.CoerceLong(regexObj.Replace(completed2, ""));
Output("Completed: " + completed);
@ -377,7 +377,7 @@ namespace Jackett.Common.Indexers
// Torrent Download URL
var passkey = tRow.Find("td:eq(1) > a:eq(1)").Attr("href");
var key = Regex.Match(passkey, "(?<=passkey\\=)([a-zA-z0-9]*)");
Uri downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", key.ToString()));
var downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", key.ToString()));
Output("Download Link: " + downloadLink.AbsoluteUri);
// Building release infos
@ -441,7 +441,7 @@ namespace Jackett.Common.Indexers
{
var parameters = new NameValueCollection();
var categoriesList = MapTorznabCapsToTrackers(query);
string searchterm = term;
var searchterm = term;
// Building our tracker query
parameters.Add("incldead", "1");

View File

@ -345,25 +345,25 @@ namespace Jackett.Common.Indexers
var name = tRow.Find("td:eq(1) > a:eq(0)").Text();
// Category
string categoryID = tRow.Find("td:eq(0) > a:eq(0)").Attr("href").Split('?').Last();
var categoryID = tRow.Find("td:eq(0) > a:eq(0)").Attr("href").Split('?').Last();
var newznab = MapTrackerCatToNewznab(categoryID);
Output("Category: " + (newznab.Count > 0 ? newznab.First().ToString() : "unknown category") + " (" + categoryID + ")");
// Seeders
int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(9)").Text(), @"\d+").Value);
var seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(9)").Text(), @"\d+").Value);
Output("Seeders: " + seeders);
// Leechers
int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(10)").Text(), @"\d+").Value);
var leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(10)").Text(), @"\d+").Value);
Output("Leechers: " + leechers);
// Files
int files = 1;
var files = 1;
files = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(4)").Text(), @"\d+").Value);
Output("Files: " + files);
// Completed
int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(8)").Text(), @"\d+").Value);
var completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(8)").Text(), @"\d+").Value);
Output("Completed: " + completed);
// Size
@ -376,7 +376,7 @@ namespace Jackett.Common.Indexers
var datestr = Regex.Replace(dateTimeOrig, @"<[^>]+>|&nbsp;", "").Trim();
datestr = Regex.Replace(datestr, "Today", DateTime.Now.ToString("MMM dd yyyy"), RegexOptions.IgnoreCase);
datestr = Regex.Replace(datestr, "Yesterday", DateTime.Now.Date.AddDays(-1).ToString("MMM dd yyyy"), RegexOptions.IgnoreCase);
DateTime date = DateTimeUtil.FromUnknown(datestr, "DK");
var date = DateTimeUtil.FromUnknown(datestr, "DK");
Output("Released on: " + date);
// Torrent Details URL
@ -390,7 +390,7 @@ namespace Jackett.Common.Indexers
// Torrent Download URL
var passkey = tRow.Find("td:eq(2) > a:eq(0)").Attr("href");
var key = Regex.Match(passkey, "(?<=torrent_pass\\=)([a-zA-z0-9]*)");
Uri downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", key.ToString()));
var downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{passkey}", key.ToString()));
Output("Download Link: " + downloadLink.AbsoluteUri);
// Building release infos
@ -450,7 +450,7 @@ namespace Jackett.Common.Indexers
{
var parameters = new NameValueCollection();
var categoriesList = MapTorznabCapsToTrackers(query);
string searchterm = term;
var searchterm = term;
// Building our tracker query
parameters.Add("searchin", "title");
@ -473,7 +473,7 @@ namespace Jackett.Common.Indexers
}
// Loop on categories and change the catagories for search purposes
for (int i = 0; i < categoriesList.Count; i++)
for (var i = 0; i < categoriesList.Count; i++)
{
// APPS
if (new[] { "63", "17", "12", "62", "64" }.Any(c => categoriesList[i].Contains(categoriesList[i])))

View File

@ -98,7 +98,7 @@ namespace Jackett.Common.Indexers
{ "user[password]", configData.Password.Value }
};
var result = await RequestLoginAndFollowRedirect(LoginUrl, pairs, String.Empty, false, null, null, true);
var result = await RequestLoginAndFollowRedirect(LoginUrl, pairs, string.Empty, false, null, null, true);
await ConfigureIfOK(result.Cookies, result.Content != null && result.Content.Contains("/odjava"), () =>
{
CQ dom = result.Content;
@ -115,8 +115,8 @@ namespace Jackett.Common.Indexers
WebClientStringResult results = null;
var queryCollection = new NameValueCollection();
List<string> catList = MapTorznabCapsToTrackers(query); // map categories from query to indexer specific
var categ = String.Join(",", catList);
var catList = MapTorznabCapsToTrackers(query); // map categories from query to indexer specific
var categ = string.Join(",", catList);
//create GET request - search URI
queryCollection.Add("offset", "0");
@ -153,7 +153,7 @@ namespace Jackett.Common.Indexers
// parse results
try
{
string RowsSelector = "div.list > div[name=\"torrrow\"]";
var RowsSelector = "div.list > div[name=\"torrrow\"]";
var ResultParser = new HtmlParser();
var SearchResultDocument = ResultParser.ParseDocument(results.Content);
@ -182,8 +182,8 @@ namespace Jackett.Common.Indexers
// Date of torrent creation
var liopis = Row.QuerySelector("div.listeklink div span.middle");
int ind = liopis.TextContent.IndexOf("Naloženo:");
String reldate = liopis.TextContent.Substring(ind + 10, 22);
var ind = liopis.TextContent.IndexOf("Naloženo:");
var reldate = liopis.TextContent.Substring(ind + 10, 22);
release.PublishDate = DateTime.ParseExact(reldate, "dd.MM.yyyy ob HH:mm:ss", CultureInfo.InvariantCulture);
// Is freeleech?

View File

@ -89,13 +89,13 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
var releases = new List<ReleaseInfo>();
bool configGoldenPopcornOnly = configData.FilterString.Value.ToLowerInvariant().Contains("goldenpopcorn");
bool configSceneOnly = configData.FilterString.Value.ToLowerInvariant().Contains("scene");
bool configCheckedOnly = configData.FilterString.Value.ToLowerInvariant().Contains("checked");
bool configFreeOnly = configData.FilterString.Value.ToLowerInvariant().Contains("free");
var configGoldenPopcornOnly = configData.FilterString.Value.ToLowerInvariant().Contains("goldenpopcorn");
var configSceneOnly = configData.FilterString.Value.ToLowerInvariant().Contains("scene");
var configCheckedOnly = configData.FilterString.Value.ToLowerInvariant().Contains("checked");
var configFreeOnly = configData.FilterString.Value.ToLowerInvariant().Contains("free");
string movieListSearchUrl = SearchUrl;
var movieListSearchUrl = SearchUrl;
var queryCollection = new NameValueCollection();
queryCollection.Add("json", "noredirect");
@ -130,11 +130,11 @@ namespace Jackett.Common.Indexers
try
{
//Iterate over the releases for each movie
JObject js_results = JObject.Parse(results.Content);
var js_results = JObject.Parse(results.Content);
foreach (var movie in js_results["Movies"])
{
string movie_title = (string)movie["Title"];
string Year = (string)movie["Year"];
var movie_title = (string)movie["Title"];
var Year = (string)movie["Year"];
var movie_imdbid_str = (string)movie["ImdbId"];
var coverStr = (string)movie["Cover"];
Uri coverUri = null;
@ -143,11 +143,11 @@ namespace Jackett.Common.Indexers
long? movie_imdbid = null;
if (!string.IsNullOrEmpty(movie_imdbid_str))
movie_imdbid = long.Parse(movie_imdbid_str);
string movie_groupid = (string)movie["GroupId"];
var movie_groupid = (string)movie["GroupId"];
foreach (var torrent in movie["Torrents"])
{
var release = new ReleaseInfo();
string release_name = (string)torrent["ReleaseName"];
var release_name = (string)torrent["ReleaseName"];
release.Title = release_name;
release.Description = string.Format("Title: {0}", movie_title);
release.BannerUrl = coverUri;
@ -168,10 +168,9 @@ namespace Jackett.Common.Indexers
release.UploadVolumeFactor = 1;
release.Category = new List<int> { 2000 };
bool golden, scene, check;
bool.TryParse((string)torrent["GoldenPopcorn"], out golden);
bool.TryParse((string)torrent["Scene"], out scene);
bool.TryParse((string)torrent["Checked"], out check);
bool.TryParse((string)torrent["GoldenPopcorn"], out var golden);
bool.TryParse((string)torrent["Scene"], out var scene);
bool.TryParse((string)torrent["Checked"], out var check);
if (configGoldenPopcornOnly && !golden)
{
@ -187,11 +186,11 @@ namespace Jackett.Common.Indexers
}
var titletags = new List<string>();
string Quality = (string)torrent["Quality"];
string Container = (string)torrent["Container"];
string Codec = (string)torrent["Codec"];
string Resolution = (string)torrent["Resolution"];
string Source = (string)torrent["Source"];
var Quality = (string)torrent["Quality"];
var Container = (string)torrent["Container"];
var Codec = (string)torrent["Codec"];
var Resolution = (string)torrent["Resolution"];
var Source = (string)torrent["Source"];
if (Year != null)
{
@ -240,8 +239,7 @@ namespace Jackett.Common.Indexers
if (titletags.Count() > 0)
release.Title += " [" + string.Join(" / ", titletags) + "]";
bool freeleech;
bool.TryParse((string)torrent["FreeleechType"], out freeleech);
bool.TryParse((string)torrent["FreeleechType"], out var freeleech);
if (freeleech)
release.DownloadVolumeFactor = 0;

View File

@ -142,7 +142,7 @@ namespace Jackett.Common.Indexers
results = await RequestStringWithCookies(searchUrl);
}
Regex IMDBRegEx = new Regex(@"tt(\d+)", RegexOptions.Compiled);
var IMDBRegEx = new Regex(@"tt(\d+)", RegexOptions.Compiled);
var hParser = new HtmlParser();
var ResultDocument = hParser.ParseDocument(results.Content);
try

View File

@ -21,8 +21,8 @@ namespace Jackett.Common.Indexers
private string SearchUrl { get { return SiteLink + "torrentsutils.php"; } }
private string LoginUrl { get { return SiteLink + "takelogin.php"; } }
private string CaptchaUrl { get { return SiteLink + "simpleCaptcha.php?numImages=1"; } }
private TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "W. Europe Standard Time", "W. Europe Standard Time");
private readonly List<String> categories = new List<string>() { "1080P", "720P", "BDRip", "BluRay", "BRRip", "DVDR", "DVDRip", "FLAC", "MP3", "MP4", "Packs", "R5", "Remux", "TVRip", "WebRip" };
private readonly TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "W. Europe Standard Time", "W. Europe Standard Time");
private readonly List<string> categories = new List<string>() { "1080P", "720P", "BDRip", "BluRay", "BRRip", "DVDR", "DVDRip", "FLAC", "MP3", "MP4", "Packs", "R5", "Remux", "TVRip", "WebRip" };
private new ConfigurationDataBasicLoginWithRSSAndDisplay configData
{
@ -45,8 +45,8 @@ namespace Jackett.Common.Indexers
Language = "en-us";
Type = "private";
this.configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show the maximum.";
this.configData.DisplayText.Name = "Notice";
configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show the maximum.";
configData.DisplayText.Name = "Notice";
AddCategoryMapping("1080P", TorznabCatType.MoviesHD, "1080P");
AddCategoryMapping("720P", TorznabCatType.MoviesHD, "720P");
@ -92,7 +92,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var searchString = query.GetQueryString();
var searchUrl = SearchUrl;
@ -119,7 +119,7 @@ namespace Jackett.Common.Indexers
}
var cats = MapTorznabCapsToTrackers(query);
queryCollection.Add("hiddenqualities", String.Join(",", cats));
queryCollection.Add("hiddenqualities", string.Join(",", cats));
searchUrl += "?" + queryCollection.GetQueryString();

View File

@ -22,7 +22,7 @@ namespace Jackett.Common.Indexers
private string LoginReferer { get { return SiteLink + "index.php?cat=1"; } }
private string SearchUrl { get { return SiteLink + "browse.php"; } }
private List<CategoryMapping> resultMapping = new List<CategoryMapping>();
private readonly List<CategoryMapping> resultMapping = new List<CategoryMapping>();
private new ConfigurationDataPinNumber configData
{
@ -283,7 +283,7 @@ namespace Jackett.Common.Indexers
var rows = dom["table > tbody > tr.browse"];
foreach (var row in rows)
{
CQ qRow = row.Cq();
var qRow = row.Cq();
var release = new ReleaseInfo();
release.MinimumRatio = 1;

View File

@ -38,7 +38,7 @@ namespace Jackett.Common.Indexers
private DateTime lastTokenFetch;
private string token;
private string app_id;
private readonly string app_id;
private bool _provideTorrentLink;
private string _sort;

View File

@ -259,8 +259,7 @@ namespace Jackett.Common.Indexers
long? imdbID = null;
if (imdbMatch.Success)
{
long l;
if (long.TryParse(imdbMatch.Value, out l))
if (long.TryParse(imdbMatch.Value, out var l))
{
imdbID = l;
}
@ -321,7 +320,7 @@ namespace Jackett.Common.Indexers
CQ dom = results.Content;
// table header is the first <tr> in table body, get all rows except this
CQ qRows = dom["#torrents-table > tbody > tr:not(:first-child)"];
var qRows = dom["#torrents-table > tbody > tr:not(:first-child)"];
foreach (var row in qRows)
{
@ -334,7 +333,7 @@ namespace Jackett.Common.Indexers
release.MinimumRatio = 1;
release.MinimumSeedTime = 172800; // 48 hours
CQ qLink = qRow.Find(".br_right > a").First();
var qLink = qRow.Find(".br_right > a").First();
release.Guid = new Uri(SiteLink + qLink.Attr("href"));
release.Comments = new Uri(SiteLink + qLink.Attr("href"));
release.Title = qLink.Find("b").Text();

View File

@ -84,16 +84,16 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
TimeZoneInfo Tz = TimeZoneInfo.CreateCustomTimeZone("custom", new TimeSpan(1, 0, 0), "custom", "custom", "custom", adjustments);
var Tz = TimeZoneInfo.CreateCustomTimeZone("custom", new TimeSpan(1, 0, 0), "custom", "custom", "custom", adjustments);
var releases = new List<ReleaseInfo>();
NameValueCollection qParams = new NameValueCollection();
var qParams = new NameValueCollection();
qParams.Add("api", "");
if (query.ImdbIDShort != null)
qParams.Add("imdb", query.ImdbIDShort);
@ -105,7 +105,7 @@ namespace Jackett.Common.Indexers
qParams.Add("categories[" + cat + "]", "1");
}
string urlSearch = SearchUrl;
var urlSearch = SearchUrl;
urlSearch += "?" + qParams.GetQueryString();
var response = await RequestStringWithCookiesAndRetry(urlSearch);

View File

@ -108,7 +108,7 @@ namespace Jackett.Common.Indexers
{
var loginPage = await RequestStringWithCookies(StartPageUrl, string.Empty);
CQ cq = loginPage.Content;
var result = this.configData;
var result = configData;
result.Captcha.Version = "2";
CQ recaptcha = cq.Find(".g-recaptcha").Attr("data-sitekey");
if (recaptcha.Length != 0)
@ -176,8 +176,8 @@ namespace Jackett.Common.Indexers
qParams.Add("cata", "yes");
qParams.Add("sec", "jax");
List<string> catList = MapTorznabCapsToTrackers(query);
foreach (string cat in catList)
var catList = MapTorznabCapsToTrackers(query);
foreach (var cat in catList)
{
qParams.Add("c" + cat, "1");
}
@ -195,25 +195,25 @@ namespace Jackett.Common.Indexers
var searchUrl = SearchUrl + "?" + qParams.GetQueryString();
var results = await RequestStringWithCookies(searchUrl);
List<ReleaseInfo> releases = ParseResponse(query, results.Content);
var releases = ParseResponse(query, results.Content);
return releases;
}
public List<ReleaseInfo> ParseResponse(TorznabQuery query, string htmlResponse)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
try
{
CQ dom = htmlResponse;
List<string> headerColumns = dom["table[class*='movehere']"].First().Find("tbody > tr > td[class='cat_Head']").Select(x => x.Cq().Text()).ToList();
int categoryIndex = headerColumns.FindIndex(x => x.Equals("Type"));
int nameIndex = headerColumns.FindIndex(x => x.Equals("Name"));
int sizeIndex = headerColumns.FindIndex(x => x.Equals("Size"));
int seedersIndex = headerColumns.FindIndex(x => x.Equals("Seeders"));
int leechersIndex = headerColumns.FindIndex(x => x.Equals("Leechers"));
var headerColumns = dom["table[class*='movehere']"].First().Find("tbody > tr > td[class='cat_Head']").Select(x => x.Cq().Text()).ToList();
var categoryIndex = headerColumns.FindIndex(x => x.Equals("Type"));
var nameIndex = headerColumns.FindIndex(x => x.Equals("Name"));
var sizeIndex = headerColumns.FindIndex(x => x.Equals("Size"));
var seedersIndex = headerColumns.FindIndex(x => x.Equals("Seeders"));
var leechersIndex = headerColumns.FindIndex(x => x.Equals("Leechers"));
var rows = dom["tr.browse"];
foreach (var row in rows)
@ -223,10 +223,10 @@ namespace Jackett.Common.Indexers
release.MinimumSeedTime = 172800; // 48 hours
var categoryCol = row.ChildElements.ElementAt(categoryIndex);
string catLink = categoryCol.Cq().Find("a").Attr("href");
var catLink = categoryCol.Cq().Find("a").Attr("href");
if (catLink != null)
{
string catId = new Regex(@"\?cat=(\d*)").Match(catLink).Groups[1].ToString().Trim();
var catId = new Regex(@"\?cat=(\d*)").Match(catLink).Groups[1].ToString().Trim();
release.Category = MapTrackerCatToNewznab(catId);
}

View File

@ -24,7 +24,7 @@ namespace Jackett.Common.Indexers
private new ConfigurationData configData
{
get { return (ConfigurationData)base.configData; }
get { return base.configData; }
set { base.configData = value; }
}
@ -91,7 +91,7 @@ namespace Jackett.Common.Indexers
release.Comments = new Uri(node.SelectSingleNode("link").InnerText);
// Try to guess the category... I'm not proud of myself...
int category = 5030;
var category = 5030;
if (serie_title.Contains("720p"))
category = 5040;
release.Category = new List<int> { category };

View File

@ -29,7 +29,7 @@ namespace Jackett.Common.Indexers
private ConfigurationData ConfigData
{
get => (ConfigurationData)base.configData;
get => configData;
set => base.configData = value;
}

View File

@ -110,7 +110,7 @@ namespace Jackett.Common.Indexers
{
var releases = new List<ReleaseInfo>();
NameValueCollection qParams = new NameValueCollection();
var qParams = new NameValueCollection();
if (!string.IsNullOrWhiteSpace(query.ImdbID))
{
@ -122,13 +122,13 @@ namespace Jackett.Common.Indexers
qParams.Add("search", query.GetQueryString());
}
List<string> catList = MapTorznabCapsToTrackers(query);
foreach (string cat in catList)
var catList = MapTorznabCapsToTrackers(query);
foreach (var cat in catList)
{
qParams.Add("c" + cat, "1");
}
string urlSearch = SearchUrl;
var urlSearch = SearchUrl;
if (qParams.Count > 0)
{
urlSearch += $"?{qParams.GetQueryString()}";
@ -147,26 +147,25 @@ namespace Jackett.Common.Indexers
CQ dom = response.Content;
var rows = dom["div[id='torrentTable'] > div[class^='box torrentBox'] > div[class='boxContent'] > table > tbody > tr"];
foreach (IDomObject row in rows)
foreach (var row in rows)
{
CQ torrentData = row.OuterHTML;
CQ cells = row.Cq().Find("td");
var cells = row.Cq().Find("td");
string title = torrentData.Find("td[class='lft'] > div > a").First().Text().Trim();
Uri link = new Uri(SiteLink + torrentData.Find("img[title='Download']").First().Parent().Attr("href").Trim());
Uri guid = link;
Uri comments = new Uri(SiteLink + torrentData.Find("td[class='lft'] > div > a").First().Attr("href").Trim().Remove(0, 1));
long size = ReleaseInfo.GetBytes(cells.Elements.ElementAt(4).Cq().Text());
int grabs = ParseUtil.CoerceInt(cells.Elements.ElementAt(5).Cq().Text());
int seeders = ParseUtil.CoerceInt(cells.Elements.ElementAt(6).Cq().Text());
int leechers = ParseUtil.CoerceInt(cells.Elements.ElementAt(7).Cq().Text());
var title = torrentData.Find("td[class='lft'] > div > a").First().Text().Trim();
var link = new Uri(SiteLink + torrentData.Find("img[title='Download']").First().Parent().Attr("href").Trim());
var guid = link;
var comments = new Uri(SiteLink + torrentData.Find("td[class='lft'] > div > a").First().Attr("href").Trim().Remove(0, 1));
var size = ReleaseInfo.GetBytes(cells.Elements.ElementAt(4).Cq().Text());
var grabs = ParseUtil.CoerceInt(cells.Elements.ElementAt(5).Cq().Text());
var seeders = ParseUtil.CoerceInt(cells.Elements.ElementAt(6).Cq().Text());
var leechers = ParseUtil.CoerceInt(cells.Elements.ElementAt(7).Cq().Text());
string pubDateStr = torrentData.Find("span[class^='elapsedDate']").First().Attr("title").Trim().Replace(" at", "");
DateTime publishDate = DateTime.ParseExact(pubDateStr, "dddd, MMMM d, yyyy h:mmtt", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
var pubDateStr = torrentData.Find("span[class^='elapsedDate']").First().Attr("title").Trim().Replace(" at", "");
var publishDate = DateTime.ParseExact(pubDateStr, "dddd, MMMM d, yyyy h:mmtt", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal).ToLocalTime();
long category = 0;
string cat = torrentData.Find("img[class^='Tcat']").First().Parent().Attr("href").Trim().Remove(0, 5);
long.TryParse(cat, out category);
var cat = torrentData.Find("img[class^='Tcat']").First().Parent().Attr("href").Trim().Remove(0, 5);
long.TryParse(cat, out var category);
var release = new ReleaseInfo();

View File

@ -97,7 +97,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var queryCollection = new NameValueCollection();
var searchString = query.GetQueryString();
var searchUrl = SearchUrl;

View File

@ -92,7 +92,7 @@ namespace Jackett.Common.Indexers
var results = await RequestStringWithCookies(searchUrl);
try
{
string RowsSelector = "table.torrent_table > tbody > tr.torrent";
var RowsSelector = "table.torrent_table > tbody > tr.torrent";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);

View File

@ -24,8 +24,8 @@ namespace Jackett.Common.Indexers
private string SearchUrl { get { return SiteLink + "torrent/br_process.php"; } }
private string DownloadUrl { get { return SiteLink + "torrent/download.php"; } }
private string BrowseUrl { get { return SiteLink + "torrent/browse.php"; } }
private List<SeriesDetail> series = new List<SeriesDetail>();
private Regex _searchStringRegex = new Regex(@"(.+?)S0?(\d+)(E0?(\d+))?$", RegexOptions.IgnoreCase);
private readonly List<SeriesDetail> series = new List<SeriesDetail>();
private readonly Regex _searchStringRegex = new Regex(@"(.+?)S0?(\d+)(E0?(\d+))?$", RegexOptions.IgnoreCase);
private new ConfigurationDataTVstore configData
{
@ -86,7 +86,7 @@ namespace Jackett.Common.Indexers
public double UploadFactorCalculator(DateTime dateTime, string type)
{
var today = DateTime.Now;
int dd = (today - dateTime).Days;
var dd = (today - dateTime).Days;
/* In case of season Packs */
if (type.Equals("season"))
@ -116,53 +116,53 @@ namespace Jackett.Common.Indexers
/// <param name="query">Query.</param>
/// <param name="already_found">Number of the already found torrents.(used for limit)</param>
/// <param name="limit">The limit to the number of torrents to download </param>
async Task<List<ReleaseInfo>> ParseTorrents(WebClientStringResult results, TorznabQuery query, int already_found, int limit, int previously_parsed_on_page)
private async Task<List<ReleaseInfo>> ParseTorrents(WebClientStringResult results, TorznabQuery query, int already_found, int limit, int previously_parsed_on_page)
{
var releases = new List<ReleaseInfo>();
try
{
String content = results.Content;
var content = results.Content;
/* Content Looks like this
* 2\15\2\1\1727\207244\1x08 \[WebDL-720p - Eng - AJP69]\gb\2018-03-09 08:11:53\akció, kaland, sci-fi \0\0\1\191170047\1\0\Anonymous\50\0\0\\0\4\0\174\0\
* 1\ 0\0\1\1727\207243\1x08 \[WebDL-1080p - Eng - AJP69]\gb\2018-03-09 08:11:49\akció, kaland, sci-fi \0\0\1\305729738\1\0\Anonymous\50\0\0\\0\8\0\102\0\0\0\0\1\\\
*/
string[] parameters = content.Split(new string[] { "\\" }, StringSplitOptions.None);
string type = "normal";
var parameters = content.Split(new string[] { "\\" }, StringSplitOptions.None);
var type = "normal";
/*
* Split the releases by '\' and go through them.
* 27 element belongs to one torrent
*/
for (int j = previously_parsed_on_page * 27; (j + 27 < parameters.Length && ((already_found + releases.Count) < limit)); j = j + 27)
for (var j = previously_parsed_on_page * 27; (j + 27 < parameters.Length && ((already_found + releases.Count) < limit)); j = j + 27)
{
ReleaseInfo release = new ReleaseInfo();
var release = new ReleaseInfo();
int imdb_id = 4 + j;
int torrent_id = 5 + j;
int is_season_id = 6 + j;
int publish_date_id = 9 + j;
int files_id = 13 + j;
int size_id = 14 + j;
int seeders_id = 23;
int peers_id = 24 + j;
int grabs_id = 25 + j;
var imdb_id = 4 + j;
var torrent_id = 5 + j;
var is_season_id = 6 + j;
var publish_date_id = 9 + j;
var files_id = 13 + j;
var size_id = 14 + j;
var seeders_id = 23;
var peers_id = 24 + j;
var grabs_id = 25 + j;
type = "normal";
//IMDB id of the series
SeriesDetail seriesinfo = series.Find(x => x.id.Contains(parameters[imdb_id]));
var seriesinfo = series.Find(x => x.id.Contains(parameters[imdb_id]));
if (seriesinfo != null && !parameters[imdb_id].Equals(""))
release.Imdb = long.Parse(seriesinfo.imdbid);
//ID of the torrent
Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
string fileinfoURL = SearchUrl + "?func=getToggle&id=" + parameters[torrent_id] + "&w=F&pg=0&now=" + unixTimestamp;
string fileinfo = (await RequestStringWithCookiesAndRetry(fileinfoURL)).Content;
var fileinfoURL = SearchUrl + "?func=getToggle&id=" + parameters[torrent_id] + "&w=F&pg=0&now=" + unixTimestamp;
var fileinfo = (await RequestStringWithCookiesAndRetry(fileinfoURL)).Content;
release.Link = new Uri(DownloadUrl + "?id=" + parameters[torrent_id]);
release.Guid = release.Link;
release.Comments = release.Link;
string[] fileinf = fileinfo.Split(new string[] { "\\\\" }, StringSplitOptions.None);
var fileinf = fileinfo.Split(new string[] { "\\\\" }, StringSplitOptions.None);
if (fileinf.Length > 1)
{
release.Title = fileinf[1];
@ -215,7 +215,7 @@ namespace Jackett.Common.Indexers
/// - IMDB ID
/// </summary>
/// <returns>The series info.</returns>
protected async Task<Boolean> GetSeriesInfo()
protected async Task<bool> GetSeriesInfo()
{
var result = (await RequestStringWithCookiesAndRetry(BrowseUrl)).Content;
@ -227,14 +227,14 @@ namespace Jackett.Common.Indexers
{
if (script.TextContent.Contains("catsh=Array"))
{
string[] seriesknowbysite = Regex.Split(script.TextContent, "catl");
for (int i = 1; i < seriesknowbysite.Length; i++)
var seriesknowbysite = Regex.Split(script.TextContent, "catl");
for (var i = 1; i < seriesknowbysite.Length; i++)
{
try
{
var id = seriesknowbysite[i];
string[] serieselement = WebUtility.HtmlDecode(id).Split(';');
SeriesDetail sd = new SeriesDetail();
var serieselement = WebUtility.HtmlDecode(id).Split(';');
var sd = new SeriesDetail();
sd.HunName = serieselement[1].Split('=')[1].Trim('\'').ToLower();
sd.EngName = serieselement[2].Split('=')[1].Trim('\'').ToLower();
sd.id = serieselement[0].Split('=')[1].Trim('\'');
@ -261,16 +261,16 @@ namespace Jackett.Common.Indexers
await GetSeriesInfo();
}
Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
WebClientStringResult results;
string searchString = "";
string exactSearchURL = "";
int page = 1;
var searchString = "";
var exactSearchURL = "";
var page = 1;
SeriesDetail seriesinfo = null;
String base64coded = "";
bool noimdbmatch = false;
var base64coded = "";
var noimdbmatch = false;
var limit = query.Limit;
if (limit == 0)
limit = 100;
@ -279,7 +279,7 @@ namespace Jackett.Common.Indexers
seriesinfo = series.Find(x => x.imdbid.Equals(query.ImdbIDShort));
if (seriesinfo != null && !query.ImdbIDShort.Equals(""))
{
String querrySeason = "";
var querrySeason = "";
if (query.Season != 0)
querrySeason = query.Season.ToString();
exactSearchURL = SearchUrl + "?s=" + querrySeason + "&e=" + query.Episode + "&g=" + seriesinfo.id + "&now=" + unixTimestamp.ToString();
@ -300,7 +300,7 @@ namespace Jackett.Common.Indexers
// convert SnnEnn to nnxnn for dashboard searches
if (query.Season == 0 && (query.Episode == null || query.Episode.Equals("")))
{
Match searchMatch = _searchStringRegex.Match(searchString);
var searchMatch = _searchStringRegex.Match(searchString);
if (searchMatch.Success)
{
query.Season = int.Parse(searchMatch.Groups[2].Value);
@ -332,21 +332,21 @@ namespace Jackett.Common.Indexers
results = await RequestStringWithCookiesAndRetry(exactSearchURL);
/* Parse page Information from result */
string content = results.Content;
var content = results.Content;
var splits = content.Split('\\');
int max_found = int.Parse(splits[0]);
int torrent_per_page = int.Parse(splits[1]);
var max_found = int.Parse(splits[0]);
var torrent_per_page = int.Parse(splits[1]);
if (torrent_per_page == 0)
return releases;
int start_page = (query.Offset / torrent_per_page) + 1;
int previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
var start_page = (query.Offset / torrent_per_page) + 1;
var previously_parsed_on_page = query.Offset - (start_page * torrent_per_page) + 1; //+1 because indexing start from 0
if (previously_parsed_on_page <= 0)
previously_parsed_on_page = query.Offset;
double pages = Math.Ceiling((double)max_found / (double)torrent_per_page);
var pages = Math.Ceiling(max_found / (double)torrent_per_page);
/* First page content is already ready */
if (start_page == 1)

View File

@ -126,7 +126,7 @@ namespace Jackett.Common.Indexers
if (loginPage.IsRedirect)
loginPage = await RequestStringWithCookies(loginPage.RedirectingTo, string.Empty);
CQ cq = loginPage.Content;
var result = this.configData;
var result = configData;
//result.CookieHeader.Value = loginPage.Cookies;
UpdateCookieHeader(loginPage.Cookies); // update cookies instead of replacing them, see #3717

View File

@ -102,7 +102,7 @@ namespace Jackett.Common.Indexers
{
var loginPage = await RequestStringWithCookies(IndexUrl, string.Empty);
CQ dom = loginPage.Content;
CQ qCaptchaImg = dom.Find("td.tablea > img").First();
var qCaptchaImg = dom.Find("td.tablea > img").First();
if (qCaptchaImg.Length == 1)
{
var CaptchaUrl = SiteLink + qCaptchaImg.Attr("src");
@ -157,12 +157,12 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var releases = new List<ReleaseInfo>();
@ -237,7 +237,7 @@ namespace Jackett.Common.Indexers
else
dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStrParts[0] + dateStrParts[1], "dd.MM.yyyyHH:mm", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
DateTime pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
var pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
release.PublishDate = pubDateUtc.ToLocalTime();
var grabs = qRow.Find("td:nth-child(7)").Text();

View File

@ -105,7 +105,7 @@ namespace Jackett.Common.Indexers
var captcha = cq.Find(".g-recaptcha");
if (captcha.Any())
{
var result = this.configData;
var result = configData;
result.CookieHeader.Value = loginPage.Cookies;
result.Captcha.SiteKey = captcha.Attr("data-sitekey");
result.Captcha.Version = "2";

View File

@ -20,7 +20,7 @@ namespace Jackett.Common.Indexers
private string APIUrl { get { return SiteLink + "api/"; } }
private string passkey;
private Dictionary<string, string> APIHeaders = new Dictionary<string, string>()
private readonly Dictionary<string, string> APIHeaders = new Dictionary<string, string>()
{
{"Content-Type", "application/json"},
};

View File

@ -22,7 +22,7 @@ namespace Jackett.Common.Indexers
private string SearchUrl { get { return SiteLink + "browse.php"; } }
private string LoginUrl { get { return SiteLink + "eing2.php"; } }
private string CaptchaUrl { get { return SiteLink + "simpleCaptcha.php?numImages=1"; } }
private TimeZoneInfo germanyTz;
private readonly TimeZoneInfo germanyTz;
private new ConfigurationDataBasicLoginWithRSSAndDisplay configData
{
@ -47,8 +47,8 @@ namespace Jackett.Common.Indexers
TorznabCaps.SupportsImdbMovieSearch = true;
this.configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show the maximum.";
this.configData.DisplayText.Name = "Notice";
configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show the maximum.";
configData.DisplayText.Name = "Notice";
AddCategoryMapping(2, TorznabCatType.PC, "Apps / Windows");
AddCategoryMapping(13, TorznabCatType.PC, "Apps / Linux");
@ -95,10 +95,10 @@ namespace Jackett.Common.Indexers
AddCategoryMapping(48, TorznabCatType.Other, "Englisch / Bildung");
AddCategoryMapping(49, TorznabCatType.TVSport, "Englisch / Sport");
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
}
@ -131,7 +131,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var searchString = query.GetQueryString();
var searchUrl = SearchUrl;
@ -154,7 +154,7 @@ namespace Jackett.Common.Indexers
// use AND+wildcard operator to avoid getting to many useless results
var searchStringArray = Regex.Split(searchString.Trim(), "[ _.-]+", RegexOptions.Compiled).ToList();
searchStringArray = searchStringArray.Select(x => "+" + x).ToList(); // add AND operators
var searchStringFinal = String.Join(" ", searchStringArray);
var searchStringFinal = string.Join(" ", searchStringArray);
queryCollection.Add("search", searchStringFinal);
}
@ -199,7 +199,7 @@ namespace Jackett.Common.Indexers
var torrentTag = descCol.Cq().Find("span.torrent-tag");
var torrentTags = torrentTag.Elements.Select(x => x.InnerHTML).ToList();
release.Title = qCommentLink.Attr("title");
release.Description = String.Join(", ", torrentTags);
release.Description = string.Join(", ", torrentTags);
release.Comments = new Uri(SiteLink + qCommentLink.Attr("href").Replace("&hit=1", ""));
release.Guid = release.Comments;
@ -214,7 +214,7 @@ namespace Jackett.Common.Indexers
else
dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "dd.MM.yyyy HH:mm", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
DateTime pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
var pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
release.PublishDate = pubDateUtc.ToLocalTime();
var imdbLink = descCol.Cq().Find("a[href*=\"&searchin=imdb\"]");

View File

@ -103,7 +103,7 @@ namespace Jackett.Common.Indexers
}
try
{
string RowsSelector = "div.borderwrap:has(div.maintitle) > table > tbody > tr:has(a[href*=\"index.php?showtopic=\"])";
var RowsSelector = "div.borderwrap:has(div.maintitle) > table > tbody > tr:has(a[href*=\"index.php?showtopic=\"])";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);

View File

@ -21,7 +21,7 @@ namespace Jackett.Common.Indexers
private new ConfigurationData configData
{
get { return (ConfigurationData)base.configData; }
get { return base.configData; }
set { base.configData = value; }
}
@ -104,7 +104,7 @@ namespace Jackett.Common.Indexers
release.Title = torrent.Value<string>("name");
// construct magnet link from infohash with all public trackers known to man
string magnet_uri = "magnet:?xt=urn:btih:" + torrent.Value<JToken>("infohash") +
var magnet_uri = "magnet:?xt=urn:btih:" + torrent.Value<JToken>("infohash") +
"&tr=udp://tracker.coppersurfer.tk:6969/announce" +
"&tr=udp://tracker.leechers-paradise.org:6969/announce" +
"&tr=udp://tracker.internetwarriors.net:1337/announce" +
@ -148,7 +148,7 @@ namespace Jackett.Common.Indexers
// convert unix timestamp to human readable date
double createdunix = torrent.Value<long>("created_unix");
System.DateTime dateTime = new System.DateTime(1970, 1, 1, 0, 0, 0, 0);
var dateTime = new System.DateTime(1970, 1, 1, 0, 0, 0, 0);
dateTime = dateTime.AddSeconds(createdunix);
release.PublishDate = dateTime;
release.Seeders = torrent.Value<int>("seeders");
@ -164,7 +164,7 @@ namespace Jackett.Common.Indexers
release.UploadVolumeFactor = 1;
// dummy mappings for sonarr, radarr, etc
string categories = string.Join(";", MapTorznabCapsToTrackers(query));
var categories = string.Join(";", MapTorznabCapsToTrackers(query));
if (!string.IsNullOrEmpty(categories))
{
if (categories.Contains("1000"))

View File

@ -88,18 +88,18 @@ namespace Jackett.Common.Indexers
// Url = SearchUrl;
//else
//{
string Url = $"{SearchUrl}&searchtext={WebUtility.UrlEncode(query.GetQueryString())}";
var Url = $"{SearchUrl}&searchtext={WebUtility.UrlEncode(query.GetQueryString())}";
//}
var response = await RequestStringWithCookiesAndRetry(Url);
List<ReleaseInfo> releases = ParseResponse(response.Content);
var releases = ParseResponse(response.Content);
return releases;
}
public List<ReleaseInfo> ParseResponse(string htmlResponse)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
try
{
@ -116,7 +116,7 @@ namespace Jackett.Common.Indexers
{
var release = new ReleaseInfo();
string title = row.QuerySelector("a[data-src]").GetAttribute("data-src");
var title = row.QuerySelector("a[data-src]").GetAttribute("data-src");
if (string.IsNullOrEmpty(title) || title == "0")
{
title = row.QuerySelector("a[data-src]").TextContent;

View File

@ -137,7 +137,7 @@ namespace Jackett.Common.Indexers
{
var loginPage = await RequestStringWithCookies(LandingUrl);
CQ dom = loginPage.Content;
CQ qCaptchaImg = dom.Find("img#regimage").First();
var qCaptchaImg = dom.Find("img#regimage").First();
if (qCaptchaImg.Length > 0)
{
var CaptchaUrl = qCaptchaImg.Attr("src");

View File

@ -184,7 +184,7 @@ namespace Jackett.Common.Indexers
if (EnhancedAnime && query.HasSpecifiedCategories && (query.Categories.Contains(TorznabCatType.TVAnime.ID) || query.Categories.Contains(100032) || query.Categories.Contains(100101) || query.Categories.Contains(100110)))
{
System.Text.RegularExpressions.Regex regex = new Regex(" ([0-9]+)");
var regex = new Regex(" ([0-9]+)");
searchTerm = regex.Replace(searchTerm, " E$1");
}
@ -226,7 +226,7 @@ namespace Jackett.Common.Indexers
//issue #3847 replace multi keyword
if (!string.IsNullOrEmpty(ReplaceMulti))
{
System.Text.RegularExpressions.Regex regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
var regex = new Regex("(?i)([\\.\\- ])MULTI([\\.\\- ])");
torrent.name = regex.Replace(torrent.name, "$1" + ReplaceMulti + "$2");
}
@ -388,9 +388,9 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> QueryExec(string request)
private async Task<string> QueryExec(string request)
{
String results;
string results;
// Switch in we are in DEV mode with Hard Drive Cache or not
if (DevMode && CacheMode)
@ -411,9 +411,9 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> QueryCache(string request)
private async Task<string> QueryCache(string request)
{
String results;
string results;
// Create Directory if not exist
System.IO.Directory.CreateDirectory(Directory);
@ -422,10 +422,10 @@ namespace Jackett.Common.Indexers
CleanCacheStorage();
// File Name
string fileName = StringUtil.HashSHA1(request) + ".json";
var fileName = StringUtil.HashSHA1(request) + ".json";
// Create fingerprint for request
string file = Path.Combine(Directory, fileName);
var file = Path.Combine(Directory, fileName);
// Checking modes states
if (File.Exists(file))
@ -434,10 +434,10 @@ namespace Jackett.Common.Indexers
Output("Loading results from hard drive cache ..." + fileName);
try
{
using (StreamReader fileReader = File.OpenText(file))
using (var fileReader = File.OpenText(file))
{
JsonSerializer serializer = new JsonSerializer();
results = (String)serializer.Deserialize(fileReader, typeof(String));
var serializer = new JsonSerializer();
results = (string)serializer.Deserialize(fileReader, typeof(string));
}
}
catch (Exception e)
@ -453,9 +453,9 @@ namespace Jackett.Common.Indexers
// Cached file didn't exist for our query, writing it right now !
Output("Writing results to hard drive cache ..." + fileName);
using (StreamWriter fileWriter = File.CreateText(file))
using (var fileWriter = File.CreateText(file))
{
JsonSerializer serializer = new JsonSerializer();
var serializer = new JsonSerializer();
serializer.Serialize(fileWriter, results);
}
}
@ -467,7 +467,7 @@ namespace Jackett.Common.Indexers
/// </summary>
/// <param name="request">URL created by Query Builder</param>
/// <returns>Results from query</returns>
private async Task<String> QueryTracker(string request)
private async Task<string> QueryTracker(string request)
{
// Cache mode not enabled or cached file didn't exist for our query
Output("\nQuerying tracker for results....");

View File

@ -109,7 +109,7 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
List<ReleaseInfo> releases = new List<ReleaseInfo>();
var releases = new List<ReleaseInfo>();
var queryCollection = new NameValueCollection();
var searchString = query.GetQueryString();
var searchUrl = SearchUrl;

View File

@ -87,12 +87,12 @@ namespace Jackett.Common.Indexers
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
TimeZoneInfo.TransitionTime startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
TimeZoneInfo.TransitionTime endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
TimeSpan delta = new TimeSpan(1, 0, 0);
TimeZoneInfo.AdjustmentRule adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
var startTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 3, 0, 0), 3, 5, DayOfWeek.Sunday);
var endTransition = TimeZoneInfo.TransitionTime.CreateFloatingDateRule(new DateTime(1, 1, 1, 4, 0, 0), 10, 5, DayOfWeek.Sunday);
var delta = new TimeSpan(1, 0, 0);
var adjustment = TimeZoneInfo.AdjustmentRule.CreateAdjustmentRule(new DateTime(1999, 10, 1), DateTime.MaxValue.Date, delta, startTransition, endTransition);
TimeZoneInfo.AdjustmentRule[] adjustments = { adjustment };
TimeZoneInfo germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var germanyTz = TimeZoneInfo.CreateCustomTimeZone("W. Europe Standard Time", new TimeSpan(1, 0, 0), "(GMT+01:00) W. Europe Standard Time", "W. Europe Standard Time", "W. Europe DST Time", adjustments);
var releases = new List<ReleaseInfo>();
@ -165,9 +165,9 @@ namespace Jackett.Common.Indexers
release.Peers = ParseUtil.CoerceInt(qLeechers.Text()) + release.Seeders;
var dateStr = qDateStr.Text().Trim();
DateTime dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "dd.MM.yy HH:mm:ss", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
var dateGerman = DateTime.SpecifyKind(DateTime.ParseExact(dateStr, "dd.MM.yy HH:mm:ss", CultureInfo.InvariantCulture), DateTimeKind.Unspecified);
DateTime pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
var pubDateUtc = TimeZoneInfo.ConvertTimeToUtc(dateGerman, germanyTz);
release.PublishDate = pubDateUtc.ToLocalTime();
var grabs = qRow.Find("td:nth-child(6)").Text();

View File

@ -250,7 +250,7 @@ namespace Jackett.Common.Indexers
}
try
{
string RowsSelector = "table#tor-tbl > tbody > tr";
var RowsSelector = "table#tor-tbl > tbody > tr";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);
@ -322,7 +322,7 @@ namespace Jackett.Common.Indexers
// referer link support
public override async Task<byte[]> Download(Uri link)
{
Uri downloadlink = link;
var downloadlink = link;
var response = await RequestStringWithCookies(link.ToString());
var results = response.Content;
var SearchResultParser = new HtmlParser();

View File

@ -1532,7 +1532,7 @@ namespace Jackett.Common.Indexers
}
try
{
string RowsSelector = "table#tor-tbl > tbody > tr";
var RowsSelector = "table#tor-tbl > tbody > tr";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);

View File

@ -240,7 +240,7 @@ namespace Jackett.Common.Indexers
}
try
{
string RowsSelector = "table.forumline > tbody > tr[class*=prow]";
var RowsSelector = "table.forumline > tbody > tr[class*=prow]";
var SearchResultParser = new HtmlParser();
var SearchResultDocument = SearchResultParser.ParseDocument(results.Content);

View File

@ -27,7 +27,7 @@ namespace Jackett.Common.Indexers
private new ConfigurationData configData
{
get { return (ConfigurationData)base.configData; }
get { return base.configData; }
set { base.configData = value; }
}
@ -108,14 +108,14 @@ namespace Jackett.Common.Indexers
var jsonContent = JObject.Parse(jsonContentStr);
string result = jsonContent.Value<string>("status");
var result = jsonContent.Value<string>("status");
if (result != "ok") // query was not successful
{
return releases.ToArray();
}
var data_items = jsonContent.Value<JToken>("data");
int movie_count = data_items.Value<int>("movie_count");
var movie_count = data_items.Value<int>("movie_count");
if (movie_count < 1) // no results found in query
{
return releases.ToArray();
@ -151,7 +151,7 @@ namespace Jackett.Common.Indexers
release.Imdb = ParseUtil.GetImdbID(imdb);
// API does not provide magnet link, so, construct it
string magnet_uri = "magnet:?xt=urn:btih:" + torrent_info.Value<string>("hash") +
var magnet_uri = "magnet:?xt=urn:btih:" + torrent_info.Value<string>("hash") +
"&dn=" + movie_item.Value<string>("slug") +
"&tr=udp://open.demonii.com:1337/announce" +
"&tr=udp://tracker.openbittorrent.com:80" +

View File

@ -5,9 +5,9 @@ namespace Jackett.Common.Models
{
public class CachedQueryResult
{
private List<ReleaseInfo> results;
private DateTime created;
private string query;
private readonly List<ReleaseInfo> results;
private readonly DateTime created;
private readonly string query;
public CachedQueryResult(string query, List<ReleaseInfo> results)
{

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models
{
class CategoryMapping
internal class CategoryMapping
{
public CategoryMapping(string trackerCat, string trackerCatDesc, int newzCat)
{

View File

@ -32,9 +32,9 @@ namespace Jackett.Common.Models.Config
{
get
{
if (!string.IsNullOrWhiteSpace(this.CustomDataFolder))
if (!string.IsNullOrWhiteSpace(CustomDataFolder))
{
return this.CustomDataFolder;
return CustomDataFolder;
}
if (System.Environment.OSVersion.Platform == PlatformID.Unix)

View File

@ -120,20 +120,20 @@ namespace Jackett.Common.Models.Config
private class UnSubscriber : IDisposable
{
private List<IObserver<ServerConfig>> lstObservers;
private IObserver<ServerConfig> observer;
private readonly List<IObserver<ServerConfig>> lstObservers;
private readonly IObserver<ServerConfig> observer;
public UnSubscriber(List<IObserver<ServerConfig>> ObserversCollection, IObserver<ServerConfig> observer)
{
this.lstObservers = ObserversCollection;
lstObservers = ObserversCollection;
this.observer = observer;
}
public void Dispose()
{
if (this.observer != null)
if (observer != null)
{
lstObservers.Remove(this.observer);
lstObservers.Remove(observer);
}
}
}

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataAbnormal : ConfigurationData
internal class ConfigurationDataAbnormal : ConfigurationData
{
public HiddenItem AuthKey { get; set; }
public HiddenItem TorrentPass { get; set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataAnimeBytes : ConfigurationDataUserPasskey
internal class ConfigurationDataAnimeBytes : ConfigurationDataUserPasskey
{
public BoolItem IncludeRaw { get; private set; }
//public DisplayItem DateWarning { get; private set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataEliteTracker : ConfigurationDataBasicLogin
internal class ConfigurationDataEliteTracker : ConfigurationDataBasicLogin
{
public BoolItem TorrentHTTPSMode { get; private set; }
public DisplayItem PagesWarning { get; private set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataFileList : ConfigurationDataBasicLogin
internal class ConfigurationDataFileList : ConfigurationDataBasicLogin
{
public BoolItem IncludeRomanianReleases { get; private set; }
public DisplayItem CatWarning { get; private set; }

View File

@ -22,12 +22,12 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
public ConfigurationDataNCore(JToken json)
{
ConfigurationDataNCore configData = new ConfigurationDataNCore();
var configData = new ConfigurationDataNCore();
dynamic configArray = JsonConvert.DeserializeObject(json.ToString());
foreach (var config in configArray)
{
string propertyName = UppercaseFirst((string)config.id);
var propertyName = UppercaseFirst((string)config.id);
switch (propertyName)
{
case "Username":
@ -51,7 +51,7 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
}
}
static string UppercaseFirst(string s)
private static string UppercaseFirst(string s)
{
if (string.IsNullOrEmpty(s))
return string.Empty;

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataPornolab : ConfigurationDataCaptchaLogin
internal class ConfigurationDataPornolab : ConfigurationDataCaptchaLogin
{
public BoolItem StripRussianLetters { get; private set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataRutracker : ConfigurationDataCaptchaLogin
internal class ConfigurationDataRutracker : ConfigurationDataCaptchaLogin
{
public BoolItem StripRussianLetters { get; private set; }
public DisplayItem CaptchaWarning { get; private set; }

View File

@ -16,12 +16,12 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
public ConfigurationDataTVstore(JToken json)
{
ConfigurationDataTVstore configData = new ConfigurationDataTVstore();
var configData = new ConfigurationDataTVstore();
dynamic configArray = JsonConvert.DeserializeObject(json.ToString());
foreach (var config in configArray)
{
string propertyName = UppercaseFirst((string)config.id);
var propertyName = UppercaseFirst((string)config.id);
switch (propertyName)
{
case "Username":
@ -36,7 +36,7 @@ namespace Jackett.Common.Models.IndexerConfig.Bespoke
}
}
static string UppercaseFirst(string s)
private static string UppercaseFirst(string s)
{
if (string.IsNullOrEmpty(s))
return string.Empty;

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataToloka : ConfigurationDataBasicLogin
internal class ConfigurationDataToloka : ConfigurationDataBasicLogin
{
public BoolItem StripCyrillicLetters { get; private set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig.Bespoke
{
class ConfigurationDataXthor : ConfigurationData
internal class ConfigurationDataXthor : ConfigurationData
{
public DisplayItem CredentialsWarning { get; private set; }
public StringItem PassKey { get; set; }

View File

@ -9,7 +9,7 @@ namespace Jackett.Common.Models.IndexerConfig
{
public class ConfigurationData
{
const string PASSWORD_REPLACEMENT = "|||%%PREVJACKPASSWD%%|||";
private const string PASSWORD_REPLACEMENT = "|||%%PREVJACKPASSWD%%|||";
protected Dictionary<string, Item> dynamics = new Dictionary<string, Item>(); // list for dynamic items
public enum ItemType
@ -140,7 +140,7 @@ namespace Jackett.Common.Models.IndexerConfig
}
break;
case ItemType.DisplayImage:
string dataUri = DataUrlUtils.BytesToDataUrl(((ImageItem)item).Value, "image/jpeg");
var dataUri = DataUrlUtils.BytesToDataUrl(((ImageItem)item).Value, "image/jpeg");
jObject["value"] = dataUri;
break;
}
@ -149,9 +149,9 @@ namespace Jackett.Common.Models.IndexerConfig
return jArray;
}
Item[] GetItems(bool forDisplay)
private Item[] GetItems(bool forDisplay)
{
List<Item> properties = GetType()
var properties = GetType()
.GetProperties()
.Where(p => p.CanRead)
.Where(p => p.PropertyType.IsSubclassOf(typeof(Item)))
@ -237,7 +237,7 @@ namespace Jackett.Common.Models.IndexerConfig
public string Challenge { get; set; }
public RecaptchaItem()
{
this.Version = "2";
Version = "2";
ItemType = ConfigurationData.ItemType.Recaptcha;
}
}

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig
{
class ConfigurationDataCaptchaLogin : ConfigurationData
internal class ConfigurationDataCaptchaLogin : ConfigurationData
{
public StringItem Username { get; private set; }

View File

@ -1,6 +1,6 @@
namespace Jackett.Common.Models.IndexerConfig
{
class ConfigurationDataPinNumber : ConfigurationDataBasicLogin
internal class ConfigurationDataPinNumber : ConfigurationDataBasicLogin
{
public StringItem Pin { get; private set; }

View File

@ -9,8 +9,8 @@ namespace Jackett.Common.Models
{
public class ResultPage
{
static XNamespace atomNs = "http://www.w3.org/2005/Atom";
static XNamespace torznabNs = "http://torznab.com/schemas/2015/feed";
private static readonly XNamespace atomNs = "http://www.w3.org/2005/Atom";
private static readonly XNamespace torznabNs = "http://torznab.com/schemas/2015/feed";
public ChannelInfo ChannelInfo { get; private set; }
public IEnumerable<ReleaseInfo> Releases { get; set; }
@ -21,7 +21,7 @@ namespace Jackett.Common.Models
Releases = new List<ReleaseInfo>();
}
string xmlDateFormat(DateTime dt)
private string xmlDateFormat(DateTime dt)
{
Thread.CurrentThread.CurrentCulture = new CultureInfo("en-US");
//Sat, 14 Mar 2015 17:10:42 -0400
@ -29,7 +29,7 @@ namespace Jackett.Common.Models
return f;
}
XElement getTorznabElement(string name, object value)
private XElement getTorznabElement(string name, object value)
{
return value == null ? null : new XElement(torznabNs + "attr", new XAttribute("name", name), new XAttribute("value", value));
}

View File

@ -59,7 +59,7 @@ namespace Jackett.Common.Models
MovieSearchAvailable = Categories.Any(i => TorznabCatType.Movies.Contains(i));
}
string SupportedTVSearchParams
private string SupportedTVSearchParams
{
get
{
@ -72,7 +72,7 @@ namespace Jackett.Common.Models
}
}
string SupportedMovieSearchParams
private string SupportedMovieSearchParams
{
get
{
@ -83,7 +83,7 @@ namespace Jackett.Common.Models
}
}
string SupportedMusicSearchParams
private string SupportedMusicSearchParams
{
get
{

View File

@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using Newtonsoft.Json.Linq;
@ -42,7 +41,7 @@ namespace Jackett.Common.Models
return t;
}
public override bool Equals(Object obj)
public override bool Equals(object obj)
{
if (obj == null || GetType() != obj.GetType())
return false;

View File

@ -204,7 +204,7 @@ namespace Jackett.Common.Models
limit = queryString.Length;
queryString = queryString.Substring(0, (int)limit);
}
Regex SplitRegex = new Regex("[^a-zA-Z0-9]+");
var SplitRegex = new Regex("[^a-zA-Z0-9]+");
QueryStringParts = SplitRegex.Split(queryString);
}
@ -225,8 +225,7 @@ namespace Jackett.Common.Models
return string.Empty;
string episodeString;
DateTime showDate;
if (DateTime.TryParseExact(string.Format("{0} {1}", Season, Episode), "yyyy MM/dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out showDate))
if (DateTime.TryParseExact(string.Format("{0} {1}", Season, Episode), "yyyy MM/dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var showDate))
episodeString = showDate.ToString("yyyy.MM.dd");
else if (string.IsNullOrEmpty(Episode))
episodeString = string.Format("S{0:00}", Season);

View File

@ -2,7 +2,7 @@ using System.Collections.Generic;
namespace Jackett.Common.Models
{
class TrackerCache
internal class TrackerCache
{
public string TrackerId { set; get; }
public string TrackerName { set; get; }

View File

@ -12,7 +12,7 @@ namespace Jackett.Common.Plumbing
{
public class JackettModule : Autofac.Module
{
private RuntimeSettings _runtimeSettings;
private readonly RuntimeSettings _runtimeSettings;
public JackettModule(RuntimeSettings runtimeSettings)
{
@ -82,13 +82,13 @@ namespace Jackett.Common.Plumbing
private static bool DetectMonoCompatabilityWithHttpClient()
{
bool usehttpclient = false;
var usehttpclient = false;
try
{
Type monotype = Type.GetType("Mono.Runtime");
var monotype = Type.GetType("Mono.Runtime");
if (monotype != null)
{
MethodInfo displayName = monotype.GetMethod("GetDisplayName", BindingFlags.NonPublic | BindingFlags.Static);
var displayName = monotype.GetMethod("GetDisplayName", BindingFlags.NonPublic | BindingFlags.Static);
if (displayName != null)
{
var monoVersion = displayName.Invoke(null, null).ToString();
@ -97,17 +97,17 @@ namespace Jackett.Common.Plumbing
{
// check if btls is supported
var monoSecurity = Assembly.Load("Mono.Security");
Type monoTlsProviderFactory = monoSecurity.GetType("Mono.Security.Interface.MonoTlsProviderFactory");
var monoTlsProviderFactory = monoSecurity.GetType("Mono.Security.Interface.MonoTlsProviderFactory");
if (monoTlsProviderFactory != null)
{
MethodInfo isProviderSupported = monoTlsProviderFactory.GetMethod("IsProviderSupported");
var isProviderSupported = monoTlsProviderFactory.GetMethod("IsProviderSupported");
if (isProviderSupported != null)
{
var btlsSupported = (bool)isProviderSupported.Invoke(null, new string[] { "btls" });
if (btlsSupported)
{
// initialize btls
MethodInfo initialize = monoTlsProviderFactory.GetMethod("Initialize", new[] { typeof(string) });
var initialize = monoTlsProviderFactory.GetMethod("Initialize", new[] { typeof(string) });
if (initialize != null)
{
initialize.Invoke(null, new string[] { "btls" });

View File

@ -53,7 +53,7 @@ namespace Jackett.Common.Services
{
lock (cache)
{
int newItemCount = 0;
var newItemCount = 0;
var trackerCache = cache.Where(c => c.TrackerId == indexer.ID).FirstOrDefault();
if (trackerCache != null)
{

View File

@ -15,10 +15,10 @@ namespace Jackett.Common.Services
public class ConfigurationService : IConfigurationService
{
private ISerializeService serializeService;
private Logger logger;
private IProcessService processService;
private RuntimeSettings runtimeSettings;
private readonly ISerializeService serializeService;
private readonly Logger logger;
private readonly IProcessService processService;
private readonly RuntimeSettings runtimeSettings;
public ConfigurationService(ISerializeService s, IProcessService p, Logger l, RuntimeSettings settings)
{
@ -59,7 +59,7 @@ namespace Jackett.Common.Services
{
try
{
string oldDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Jackett");
var oldDir = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Jackett");
if (Directory.Exists(oldDir))
{
@ -175,7 +175,7 @@ namespace Jackett.Common.Services
public string GetContentFolder()
{
// If we are debugging we can use the non copied content.
string dir = Path.Combine(ApplicationFolder(), "Content");
var dir = Path.Combine(ApplicationFolder(), "Content");
;
#if DEBUG
@ -191,7 +191,7 @@ namespace Jackett.Common.Services
public List<string> GetCardigannDefinitionsFolders()
{
List<string> dirs = new List<string>();
var dirs = new List<string>();
if (System.Environment.OSVersion.Platform == PlatformID.Unix)
{
@ -205,7 +205,7 @@ namespace Jackett.Common.Services
}
// If we are debugging we can use the non copied definitions.
string dir = Path.Combine(ApplicationFolder(), "Definitions");
var dir = Path.Combine(ApplicationFolder(), "Definitions");
;
#if DEBUG

View File

@ -42,8 +42,8 @@ namespace Jackett.Common.Services
return movie;
}
private WebClient WebClient;
private string apiKey;
private readonly WebClient WebClient;
private readonly string apiKey;
private string url;
}
}

View File

@ -146,8 +146,8 @@ namespace Jackett.Common.Services
return Path.Combine(configService.GetIndexerConfigDir(), indexer.ID + ".json");
}
private IConfigurationService configService;
private Logger logger;
private readonly IConfigurationService configService;
private readonly Logger logger;
private static readonly object configWriteLock = new object();
}

View File

@ -19,16 +19,16 @@ namespace Jackett.Common.Services
public class IndexerManagerService : IIndexerManagerService
{
private ICacheService cacheService;
private IIndexerConfigurationService configService;
private IProtectionService protectionService;
private WebClient webClient;
private IProcessService processService;
private IConfigurationService globalConfigService;
private ServerConfig serverConfig;
private Logger logger;
private readonly ICacheService cacheService;
private readonly IIndexerConfigurationService configService;
private readonly IProtectionService protectionService;
private readonly WebClient webClient;
private readonly IProcessService processService;
private readonly IConfigurationService globalConfigService;
private readonly ServerConfig serverConfig;
private readonly Logger logger;
private Dictionary<string, IIndexer> indexers = new Dictionary<string, IIndexer>();
private readonly Dictionary<string, IIndexer> indexers = new Dictionary<string, IIndexer>();
private AggregateIndexer aggregateIndexer;
public IndexerManagerService(IIndexerConfigurationService config, IProtectionService protectionService, WebClient webClient, Logger l, ICacheService cache, IProcessService processService, IConfigurationService globalConfigService, ServerConfig serverConfig)
@ -108,7 +108,7 @@ namespace Jackett.Common.Services
try
{
string DefinitionString = File.ReadAllText(file.FullName);
var DefinitionString = File.ReadAllText(file.FullName);
var definition = deserializer.Deserialize<IndexerDefinition>(DefinitionString);
return definition;
}
@ -119,7 +119,7 @@ namespace Jackett.Common.Services
}
}).Where(definition => definition != null);
List<IIndexer> cardigannIndexers = definitions.Select(definition =>
var cardigannIndexers = definitions.Select(definition =>
{
try
{

Some files were not shown because too many files have changed in this diff Show More