core: Add torznab cache option for individual indexers (#12235)

This commit is contained in:
mikeoscar2006 2021-08-30 08:39:48 +05:30 committed by GitHub
parent 0b4195f3cb
commit 88959ac824
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 22 additions and 4 deletions

View File

@ -738,6 +738,9 @@ configuration or even disable the cache. Keep in mind that you can be banned by
* **Cache TTL (seconds)**: (default 2100 / 35 minutes) It indicates how long the results can remain in the cache.
* **Cache max results per indexer**: (default 1000) How many results are kept in cache for each indexer. This limit caps RAM usage. If you make many requests and you have enough memory, increase this number.
## Torznab cache
If you have enabled the Jackett internal cache but want fresh results from a particular indexer (bypassing the internal cache), add the **&cache=false** parameter to your Torznab query URL.
## Configuring FlareSolverr
Some indexers are protected by CloudFlare or similar services and Jackett is not able to solve the challenges.
For these cases, [FlareSolverr](https://github.com/FlareSolverr/FlareSolverr) has been integrated into Jackett. This service is in charge of solving the challenges and configuring Jackett with the necessary cookies.

View File

@ -390,9 +390,12 @@ namespace Jackett.Common.Indexers
if (!CanHandleQuery(query) || !CanHandleCategories(query, isMetaIndexer))
return new IndexerResult(this, new ReleaseInfo[0], false);
var cachedReleases = cacheService.Search(this, query);
if (cachedReleases != null)
return new IndexerResult(this, cachedReleases, true);
if (query.Cache)
{
var cachedReleases = cacheService.Search(this, query);
if (cachedReleases != null)
return new IndexerResult(this, cachedReleases, true);
}
try
{

View File

@ -26,6 +26,7 @@ namespace Jackett.Common.Models.DTO
public string author { get; set; }
public string title { get; set; }
public string configured { get; set; }
public string cache { get; set; }
public static TorznabQuery ToTorznabQuery(TorznabRequest request)
{
@ -45,6 +46,12 @@ namespace Jackett.Common.Models.DTO
if (!string.IsNullOrWhiteSpace(request.offset))
query.Offset = ParseUtil.CoerceInt(request.offset);
bool _cache;
if (bool.TryParse(request.cache, out _cache))
{
query.Cache = _cache;
}
if (request.cat != null)
{
query.Categories = request.cat.Split(',').Where(s => !string.IsNullOrWhiteSpace(s)).Select(s => int.Parse(s)).ToArray();

View File

@ -19,6 +19,7 @@ namespace Jackett.Common.Models
public int? TvdbID { get; set; }
public string ImdbID { get; set; }
public int? TmdbID { get; set; }
public bool Cache { get; set; } = true;
public int Season { get; set; }
public string Episode { get; set; }
@ -137,7 +138,8 @@ namespace Jackett.Common.Models
RageID = RageID,
TvdbID = TvdbID,
ImdbID = ImdbID,
TmdbID = TmdbID
TmdbID = TmdbID,
Cache = Cache
};
if (Categories?.Length > 0)
{

View File

@ -252,6 +252,9 @@ namespace Jackett.Common.Services
// Both request must return the same results, if not we are breaking Jackett search
json = json.Replace("\"SearchTerm\":null", "\"SearchTerm\":\"\"");
// The Cache parameter's value should not affect caching itself
json = json.Replace("\"Cache\":false", "\"Cache\":true");
return json;
}