Mirror of https://github.com/evilhero/mylar (synced 2025-03-09 13:24:53 +00:00)
IMP: Added config.ini option to not create series folders (create_folder=True/False) when adding/refreshing series, IMP: Added test connection button for rtorrent, FIX: Fixed weekly pull recreate, FIX: Removed TorrentProject from public torrent searches, FIX: Updated url for demonoid again, FIX: Fixed schedulers not running/returning error when forced from schedulers tab, IMP: Removed some more unnecessary logging lines
This commit is contained in: parent dccbdcdba8, commit c45a70b0da. 12 changed files with 157 additions and 97 deletions.
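The first change in the list surfaces in config.ini as a new General option. As an illustration only (the exact key name written to config.ini is an assumption based on the CREATE_FOLDERS definition added in this commit; check your generated file), disabling series-folder creation would look like:

[General]
create_folders = False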
@@ -505,7 +505,7 @@
 <fieldset id="rtorrent_options">
 <div class="row">
 <label>rTorrent Host:port(optional)</label>
-<input type="text" name="rtorrent_host" value="${config['rtorrent_host']}" size="30">
+<input type="text" id="rtorrent_host" name="rtorrent_host" value="${config['rtorrent_host']}" size="30">
 <small>ie. my.rtorrent:80, 192.168.1.1, scgi://localhost:5000</small>
 </div>
 <div class="row checkbox left clearfix">
@@ -518,7 +518,7 @@
 </div>
 <div class="row">
 <label>rTorrent Authentication</label>
-<select name="rtorrent_authentication">
+<select name="rtorrent_authentication" id="rtorrent_authentication">
 %for x in ['basic', 'digest']:
 <%
 if config['rtorrent_authentication'] == x:
@@ -532,16 +532,16 @@
 </div>
 <div class="row">
 <label>rTorrent RPC</label>
-<input type="text" name="rtorrent_rpc_url" value="${config['rtorrent_rpc_url']}" size="30">
+<input type="text" id="rtorrent_rpc_url" name="rtorrent_rpc_url" value="${config['rtorrent_rpc_url']}" size="30">
 <small>ie. httprpc plugin = rutorrent/plugins/httprpc/action.php<br>rpc plugin = user/RPC2</small>
 </div>
 <div class="row">
 <label>rTorrent Username</label>
-<input type="text" name="rtorrent_username" value="${config['rtorrent_username']}" size="30">
+<input type="text" id="rtorrent_username" name="rtorrent_username" value="${config['rtorrent_username']}" size="30">
 </div>
 <div class="row">
 <label>rTorrent Password</label>
-<input type="password" name="rtorrent_password" value="${config['rtorrent_password']| h}" size="30">
+<input type="password" id="rtorrent_password" name="rtorrent_password" value="${config['rtorrent_password']| h}" size="30">
 </div>
 <div class="row">
 <label>rTorrent Directory</label>
@@ -557,6 +557,9 @@
 <input id="rtorrent_startonload" type="checkbox" name="rtorrent_startonload" value="1" ${config['rtorrent_startonload']} /><label>Start Torrent on Successful Load</label>
 <small>Automatically start torrent on successful loading within rtorrent client</small>
 </div>
+<div class="row">
+<input type="button" value="Test Connection" id="rtorrent_test" />
+</div>
 </fieldset>
 <fieldset id="transmission_options">
 <div class="row">
@@ -825,6 +828,10 @@
 </div>
 <div class="row checkbox">
 <input id="newznab_enabled" type="checkbox" name="newznab_enabled${newznab_number}" value="1" ${newznab_enabled} /><label>Enabled</label>
+<!--
+<input type="button" value="Test ${newznab[0]}" id="newznab_test(newznab${newznab_number})" style="float:center" /></br>
+<img name="newznabstatus${newznab_number}" id="newznabstatus${newznab_number}" src="interfaces/default/images/newznab_success.png" style="vertical-align: middle; margin: 3px; margin-top: -1px;" height="10" width="10" DISABLED />
+-->
 </div>
 <div class="row">
 <input type="button" class="remove" id="newznab${newznab_number}" value="Remove ${newznab[0]}">
@@ -1871,6 +1878,44 @@
 $('#autoadd').append('<input type="hidden" name="tsab" value=1 />');
 };
+
+$('#rtorrent_test').click(function () {
+var host = document.getElementById('rtorrent_host').value;
+var username = document.getElementById('rtorrent_username').value;
+var password = document.getElementById('rtorrent_password').value;
+var auth = document.getElementById('rtorrent_authentication').value;
+var verify = document.getElementById('rtorrent_verify').value;
+var ssl = document.getElementById('rtorrent_ssl').value;
+var rpc_url = document.getElementById('rtorrent_rpc_url').value;
+alert(host);
+$.get("testrtorrent",
+{ host: host, username: username, password: password, auth: auth, verify: verify, ssl: ssl, rpc_url: rpc_url },
+function(data){
+if (data.error != undefined) {
+alert(data.error);
+return;
+}
+//$('#rtorrentstatus').val(data);
+$('#ajaxMsg').html("<div class='msg'><span class='ui-icon ui-icon-check'></span>"+data+"</div>");
+});
+$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
+});
+
+$('#newznab_test').click(function (x) {
+// var name = document.getElementById("newznab_name").value;
+alert(x);
+$.get("testnewznab",
+{ name: name, host: host, ssl: ssl, apikey: apikey },
+function(data){
+if (data.error != undefined) {
+alert(data.error);
+return;
+}
+$('#newznabstatus').show();
+$('#ajaxMsg').html("Successfully connected to newnzab");
+});
+$('#ajaxMsg').addClass('success').fadeIn().delay(3000).fadeOut();
+});

 $('#nma_test').click(function () {
 var apikey = document.getElementById('nma_apikey').value;
 $.get("testNMA",
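For illustration, the new Test Connection button simply issues a GET against the testrtorrent endpoint with the form values collected above. A minimal sketch that drives the same endpoint outside the browser, assuming a local Mylar instance on port 8090 with no HTTP authentication (all connection values are placeholders):

# Sketch only: exercise the new testrtorrent endpoint directly.
import requests

params = {
    'host': 'scgi://localhost:5000',   # placeholder rTorrent host
    'username': '', 'password': '',
    'auth': 'basic',
    'verify': '0', 'ssl': '0',
    'rpc_url': 'rutorrent/plugins/httprpc/action.php',   # placeholder httprpc path
}
resp = requests.get('http://localhost:8090/testrtorrent', params=params)
print(resp.text)   # success message or the connection error string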
@@ -267,7 +267,7 @@ def initialize(config_file):
 
 #set default URL for Public trackers (just in case it changes more frequently)
 WWTURL = 'https://worldwidetorrents.me/'
-DEMURL = 'https://dnoid.me/'
+DEMURL = 'https://www.demonoid.pw/'
 TPSEURL = 'https://torrentproject.se/'
 
 if CONFIG.LOCMOVE:
@@ -366,7 +366,7 @@ def start():
 #let's do a run at the Wanted issues here (on startup) if enabled.
 ss = searchit.CurrentSearcher()
 if CONFIG.NZB_STARTUP_SEARCH:
-SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.now(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
+SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.utcnow(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
 else:
 if SCHED_SEARCH_LAST is not None:
 search_timestamp = float(SCHED_SEARCH_LAST)
@@ -69,6 +69,7 @@ _CONFIG_DEFINITIONS = OrderedDict({
 'SEND2READ': (bool, 'General', False),
 'NZB_STARTUP_SEARCH': (bool, 'General', False),
 'UNICODE_ISSUENUMBER': (bool, 'General', False),
+'CREATE_FOLDERS': (bool, 'General', True),
 
 'RSS_CHECKINTERVAL': (int, 'Scheduler', 20),
 'SEARCH_INTERVAL': (int, 'Scheduler', 360),
@@ -1097,10 +1097,10 @@ def checkthepub(ComicID):
 else:
 for publish in publishers:
 if publish in pubchk['ComicPublisher'].lower():
-logger.fdebug('Biggie publisher detected - ' + pubchk['ComicPublisher'])
+#logger.fdebug('Biggie publisher detected - ' + pubchk['ComicPublisher'])
 return mylar.CONFIG.BIGGIE_PUB
 
-logger.fdebug('Indie publisher detected - ' + pubchk['ComicPublisher'])
+#logger.fdebug('Indie publisher detected - ' + pubchk['ComicPublisher'])
 return mylar.CONFIG.INDIE_PUB
 
 def annual_update():
@@ -1272,7 +1272,6 @@ def LoadAlternateSearchNames(seriesname_alt, comicid):
 
-#logger.fdebug('seriesname_alt:' + str(seriesname_alt))
 if seriesname_alt is None or seriesname_alt == 'None':
 logger.fdebug('no Alternate name given. Aborting search.')
 return "no results"
 else:
 chkthealt = seriesname_alt.split('##')
@@ -3247,6 +3246,19 @@ def stupidchk():
 mylar.COUNT_COMICS = CCOMICS[0][0]
 mylar.EN_OOMICS = ens[0][0]
 
+def newznab_test(name, host, ssl, apikey):
+params = {'t': 'caps',
+'apikey': apikey,
+'o': json}
+import requests
+try:
+response = requests.get(host, params=params, verify=ssl)
+except:
+logger.warn('Unable to connect')
+return
+else:
+logger.info('Successfully connected: %s' % response['status_code'])
+
 
 def file_ops(path,dst,arc=False,one_off=False):
 # # path = source path + filename
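The helper above probes the indexer with a newznab 'caps' call, which is the lightest request a newznab API answers. A standalone sketch of that probe, separate from the helper itself and using placeholder host and apikey values:

# Sketch only: a newznab "caps" connectivity probe with placeholder values.
import requests

params = {'t': 'caps', 'apikey': 'YOURAPIKEY', 'o': 'json'}
try:
    response = requests.get('https://indexer.example.com/api', params=params, verify=True, timeout=30)
except requests.exceptions.RequestException as e:
    print('Unable to connect: %s' % e)
else:
    print('Successfully connected: %s' % response.status_code)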
@@ -51,14 +51,8 @@ def is_exists(comicid):
 
 
 def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=None, calledfrom=None, annload=None, chkwant=None, issuechk=None, issuetype=None, latestissueinfo=None, csyear=None):
-# Putting this here to get around the circular import. Will try to use this to update images at later date.
-# from mylar import cache
-
 myDB = db.DBConnection()
 
-# We need the current minimal info in the database instantly
-# so we don't throw a 500 error when we redirect to the artistPage
-
 controlValueDict = {"ComicID": comicid}
 
 dbcomic = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
@@ -86,10 +80,11 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
 latestissueinfo.append({"latestiss": dbcomic['LatestIssue'],
 "latestdate": dbcomic['LatestDate']})
 
-checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
-if not checkdirectory:
-logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
-return
+if mylar.CONFIG.CREATE_FOLDERS is True:
+checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
+if not checkdirectory:
+logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
+return
 oldcomversion = dbcomic['ComicVersion'] #store the comicversion and chk if it exists before hammering.
 myDB.upsert("comics", newValueDict, controlValueDict)
 
@@ -251,16 +246,11 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
 if os.path.isdir(comlocation):
 logger.info('Directory (' + comlocation + ') already exists! Continuing...')
 else:
-#print ("Directory doesn't exist!")
-#try:
-# os.makedirs(str(comlocation))
-# logger.info(u"Directory successfully created at: " + str(comlocation))
-#except OSError:
-# logger.error(u"Could not create comicdir : " + str(comlocation))
-checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
-if not checkdirectory:
-logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
-return
+if mylar.CONFIG.CREATE_FOLDERS is True:
+checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
+if not checkdirectory:
+logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
+return
 
 #try to account for CV not updating new issues as fast as GCD
 #seems CV doesn't update total counts
@@ -358,7 +348,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
 #ComicImage = "http://" + str(mylar.CONFIG.HTTP_HOST) + ":" + str(mylar.CONFIG.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"
 
 #if the comic cover local is checked, save a cover.jpg to the series folder.
-if mylar.CONFIG.COMIC_COVER_LOCAL:
+if mylar.CONFIG.COMIC_COVER_LOCAL and os.path.isdir(comlocation):
 try:
 comiclocal = os.path.join(comlocation, 'cover.jpg')
 shutil.copyfile(coverfile, comiclocal)
@@ -433,7 +423,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
 if anndata:
 manualAnnual(annchk=anndata)
 
-if mylar.CONFIG.CVINFO or (mylar.CONFIG.CV_ONLY and mylar.CONFIG.CVINFO):
+if (mylar.CONFIG.CVINFO or (mylar.CONFIG.CV_ONLY and mylar.CONFIG.CVINFO)) and os.path.isdir(comlocation):
 if not os.path.exists(os.path.join(comlocation, "cvinfo")) or mylar.CONFIG.CV_ONETIMER:
 with open(os.path.join(comlocation, "cvinfo"), "w") as text_file:
 text_file.write(str(comic['ComicURL']))
@@ -701,16 +691,11 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
 if os.path.isdir(comlocation):
 logger.info(u"Directory (" + comlocation + ") already exists! Continuing...")
 else:
-#print ("Directory doesn't exist!")
-#try:
-# os.makedirs(str(comlocation))
-# logger.info(u"Directory successfully created at: " + str(comlocation))
-#except OSError:
-# logger.error(u"Could not create comicdir : " + str(comlocation))
-checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
-if not checkdirectory:
-logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
-return
+if mylar.CONFIG.CREATE_FOLDERS is True:
+checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
+if not checkdirectory:
+logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
+return
 
 comicIssues = gcdinfo['totalissues']
 
@@ -744,7 +729,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
 
 logger.info(u"Sucessfully retrieved cover for " + ComicName)
 #if the comic cover local is checked, save a cover.jpg to the series folder.
-if mylar.CONFIG.COMIC_COVER_LOCAL:
+if mylar.CONFIG.COMIC_COVER_LOCAL and os.path.isdir(comlocation):
 comiclocal = os.path.join(comlocation + "/cover.jpg")
 shutil.copy(ComicImage, comiclocal)
 except IOError as e:
@@ -890,7 +875,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
 
 myDB.upsert("comics", newValueStat, controlValueStat)
 
-if mylar.CONFIG.CVINFO:
+if mylar.CONFIG.CVINFO and os.path.isdir(comlocation):
 if not os.path.exists(comlocation + "/cvinfo"):
 with open(comlocation + "/cvinfo", "w") as text_file:
 text_file.write("http://comicvine.gamespot.com/volume/49-" + str(comicid))
@@ -95,12 +95,16 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
 feedtype = ' from the New Releases RSS Feed for comics'
 verify = bool(mylar.CONFIG.VERIFY_32P)
 elif pickfeed == "2" and srchterm is not None: # TP.SE search / RSS
-feed = tpse_url + 'rss/' + str(srchterm) + '/'
-verify = bool(mylar.CONFIG.TPSE_VERIFY)
+lp+=1
+continue
+#feed = tpse_url + 'rss/' + str(srchterm) + '/'
+#verify = bool(mylar.CONFIG.TPSE_VERIFY)
 elif pickfeed == "3": # TP.SE rss feed (3101 = comics category) / non-RSS
-feed = tpse_url + '?hl=en&safe=off&num=50&start=0&orderby=best&s=&filter=3101'
-feedtype = ' from the New Releases RSS Feed for comics from TP.SE'
-verify = bool(mylar.CONFIG.TPSE_VERIFY)
+lp+=1
+continue
+#feed = tpse_url + '?hl=en&safe=off&num=50&start=0&orderby=best&s=&filter=3101'
+#feedtype = ' from the New Releases RSS Feed for comics from TP.SE'
+#verify = bool(mylar.CONFIG.TPSE_VERIFY)
 elif pickfeed == "4": #32p search
 if any([mylar.CONFIG.USERNAME_32P is None, mylar.CONFIG.USERNAME_32P == '', mylar.CONFIG.PASSWORD_32P is None, mylar.CONFIG.PASSWORD_32P == '']):
 logger.error('[RSS] Warning - you NEED to enter in your 32P Username and Password to use this option.')
@@ -53,7 +53,7 @@ class tehMain():
 if mylar.CONFIG.ENABLE_TORRENT_SEARCH:
 logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
 if mylar.CONFIG.ENABLE_TPSE:
-logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on TorrentProject.')
+logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on Demonoid / WorldWideTorrents.')
 #rsscheck.torrents(pickfeed='3') #TP.SE RSS Check (has to be page-parsed)
 rsscheck.torrents(pickfeed='TPSE') #TPSE = DEM RSS Check + WWT RSS Check
 if mylar.CONFIG.ENABLE_32P:
@@ -1882,13 +1882,9 @@ def provider_sequence(nzbprovider, torprovider, newznab_hosts):
 prov_order = []
 
 nzbproviders_lower = [x.lower() for x in nzbprovider]
-print nzbprovider
-print mylar.CONFIG.PROVIDER_ORDER
 
-print len(mylar.CONFIG.PROVIDER_ORDER)
 if len(mylar.CONFIG.PROVIDER_ORDER) > 0:
 for pr_order in sorted(mylar.CONFIG.PROVIDER_ORDER.items(), key=itemgetter(0), reverse=False):
-print pr_order
 logger.fdebug('looking for ' + str(pr_order[1]).lower())
 logger.fdebug('nzbproviders ' + str(nzbproviders_lower))
 logger.fdebug('torproviders ' + str(torprovider))
@@ -39,7 +39,11 @@ class RTorrent(object):
 if not self.client.connect(mylar.CONFIG.RTORRENT_HOST,
 mylar.CONFIG.RTORRENT_USERNAME,
 mylar.CONFIG.RTORRENT_PASSWORD,
-mylar.CONFIG.RTORRENT_AUTHENTICATION):
+mylar.CONFIG.RTORRENT_AUTHENTICATION,
+mylar.CONFIG.RTORRENT_VERIFY,
+mylar.CONFIG.RTORRENT_SSL,
+mylar.CONFIG.RTORRENT_RPC_URL,
+mylar.CONFIG.RTORRENT_CA_BUNDLE):
 logger.error('could not connect to %s, exiting', mylar.CONFIG.RTORRENT_HOST)
 sys.exit(-1)
 
@@ -11,36 +11,36 @@ class TorrentClient(object):
 def __init__(self):
 self.conn = None
 
-def getVerifySsl(self):
+def getVerifySsl(self, verify, ca_bundle):
 # Ensure verification has been enabled
-if not mylar.CONFIG.RTORRENT_VERIFY:
+if not verify:
 return False
 
 # Use ca bundle if defined
-if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None and os.path.exists(mylar.CONFIG.RTORRENT_CA_BUNDLE):
-return mylar.CONFIG.RTORRENT_CA_BUNDLE
+if ca_bundle is not None and os.path.exists(ca_bundle):
+return ca_bundle
 
 # Use default ssl verification
 return True
 
-def connect(self, host, username, password, auth):
+def connect(self, host, username, password, auth, verify, ssl, rpc_url, ca_bundle):
 if self.conn is not None:
 return self.conn
 
 if not host:
 return False
 
-url = helpers.cleanHost(host, protocol = True, ssl = mylar.CONFIG.RTORRENT_SSL)
+url = helpers.cleanHost(host, protocol = True, ssl = ssl)
 
 # Automatically add '+https' to 'httprpc' protocol if SSL is enabled
-if mylar.CONFIG.RTORRENT_SSL and url.startswith('httprpc://'):
+if ssl is True and url.startswith('httprpc://'):
 url = url.replace('httprpc://', 'httprpc+https://')
 
 parsed = urlparse(url)
 
 # rpc_url is only used on http/https scgi pass-through
 if parsed.scheme in ['http', 'https']:
-url += mylar.CONFIG.RTORRENT_RPC_URL
+url += rpc_url
 
 #logger.fdebug(url)
 
@@ -49,7 +49,7 @@ class TorrentClient(object):
 self.conn = RTorrent(
 url,(auth, username, password),
 verify_server=True,
-verify_ssl=self.getVerifySsl()
+verify_ssl=self.getVerifySsl(verify, ca_bundle)
 )
 except Exception as err:
 logger.error('Failed to connect to rTorrent: %s', err)
@@ -60,7 +60,7 @@ class TorrentClient(object):
 self.conn = RTorrent(
 url, (auth, username, password),
 verify_server=True,
-verify_ssl=self.getVerifySsl()
+verify_ssl=self.getVerifySsl(verify, ca_bundle)
 )
 except Exception as err:
 logger.error('Failed to connect to rTorrent: %s', err)
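With the widened signature, callers now hand the client every connection detail instead of letting it read mylar.CONFIG globals. A sketch of a call under the new signature (all values are placeholders; the import path follows the module layout referenced elsewhere in this commit):

# Sketch only: connecting with the refactored TorrentClient signature.
from torrent.clients.rtorrent import TorrentClient

client = TorrentClient()
conn = client.connect(
    host='https://my.rtorrent:443',                    # placeholder host
    username='user', password='secret',
    auth='digest',
    verify=True,                                       # verify the SSL certificate
    ssl=True,
    rpc_url='rutorrent/plugins/httprpc/action.php',    # placeholder httprpc path
    ca_bundle=None,                                    # or a path to a CA bundle file
)
if not conn:
    print('connection failed')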
@@ -30,5 +30,3 @@ class CheckVersion():
 versioncheck.checkGithub()
 helpers.job_management(write=True, job='Check Version', last_run_completed=helpers.utctimestamp(), status='Waiting')
-mylar.VERSION_STATUS = 'Waiting'
-logger.info('updated')
 return
@@ -241,7 +241,7 @@ class WebInterface(object):
 try:
 searchresults = mb.findComic(name, mode, issue=None)
 except TypeError:
-logger.error('Unable to perform required pull-list search for : [name: ' + name + '][mode: ' + mode + ']')
+logger.error('Unable to perform required search for : [name: ' + name + '][mode: ' + mode + ']')
 return
 elif type == 'comic' and mode == 'want':
 try:
@@ -887,9 +887,6 @@ class WebInterface(object):
 
 def force_rss(self):
 logger.info('Attempting to run RSS Check Forcibly')
-#forcerss = True
-#threading.Thread(target=mylar.rsscheck.tehMain, args=[True]).start()
-#this is for use with the new scheduler not in place yet.
 forcethis = mylar.rsscheckit.tehMain()
 threading.Thread(target=forcethis.run, args=[True]).start()
 force_rss.exposed = True
@@ -1767,13 +1764,16 @@ class WebInterface(object):
 return {'status' : 'success'}
 manualpull.exposed = True
 
-def pullrecreate(self):
+def pullrecreate(self, weeknumber=None, year=None):
 myDB = db.DBConnection()
-myDB.action("DROP TABLE weekly")
-mylar.dbcheck()
-logger.info("Deleted existed pull-list data. Recreating Pull-list...")
+if weeknumber is None:
+myDB.action("DROP TABLE weekly")
+mylar.dbcheck()
+logger.info("Deleted existed pull-list data. Recreating Pull-list...")
+else:
+myDB.action('DELETE FROM weekly WHERE weeknumber=? and year=?', [weeknumber, year])
 forcecheck = 'yes'
-weeklypull.pullit(forcecheck)
+weeklypull.pullit(forcecheck, weeknumber, year)
 raise cherrypy.HTTPRedirect("pullist")
 pullrecreate.exposed = True
 
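Illustration of the new behaviour: passing a week number and year recreates just that week instead of dropping the whole weekly table. A sketch against a local instance (port 8090 and no HTTP auth assumed; week/year are placeholders):

# Sketch only: ask Mylar to rebuild a single week of the pull-list.
import requests

requests.get('http://localhost:8090/pullrecreate',
             params={'weeknumber': 36, 'year': 2017},
             allow_redirects=False)   # the handler redirects back to the pullist page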
@@ -2163,32 +2163,19 @@ class WebInterface(object):
 ctrl = {'JobName': job}
 val = {'Status': 'Waiting'}
 myDB.upsert('jobhistory', val, ctrl)
 
 helpers.job_management()
 else:
 logger.warn('%s cannot be matched against any scheduled jobs - maybe you should restart?' % job)
 
 jobmanage.exposed = True
 
 def schedulerForceCheck(self, jobid):
-from apscheduler.triggers.date import DateTrigger
 for jb in mylar.SCHED.get_jobs():
 #logger.info('jb : %s' % jb)
 if jobid.lower() in str(jb).lower():
-logger.info('[%s] Now force submitting job.' % jb)
-if jobid == 'rss':
-mylar.SCHED.add_job(func=jb.func, args=[True], trigger=DateTrigger(run_date=datetime.datetime.now()))
-elif jobid == 'weekly':
-mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now()))
-elif jobid == 'search':
-mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now()))
-elif jobid == 'version':
-mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now()))
-elif jobid == 'updater':
-mylar.SCHED.add_job(func=jb.func, args=[None,None,True], trigger=DateTrigger(run_date=datetime.datetime.now()))
-elif jobid == 'monitor':
-mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now()))
+logger.info('[%s] Now force submitting job for jobid %s' % (jb, jobid))
+if any([jobid == 'rss', jobid == 'weekly', jobid =='search', jobid == 'version', jobid == 'updater', jobid == 'monitor']):
+jb.modify(next_run_time=datetime.datetime.utcnow())
 break
 
 schedulerForceCheck.exposed = True
 
 def manageComics(self):
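The scheduler fix above drops the per-job DateTrigger re-submissions and instead bumps the existing job's next_run_time so APScheduler fires it immediately. A minimal standalone sketch of that pattern (assumes APScheduler 3.x, the library Mylar's SCHED is built on):

# Sketch only: force a scheduled job to run now by moving its next_run_time.
import datetime
import time
from apscheduler.schedulers.background import BackgroundScheduler

def dummy_job():
    print('job ran')

sched = BackgroundScheduler(timezone='UTC')
job = sched.add_job(dummy_job, 'interval', minutes=60, id='search')
sched.start()

job.modify(next_run_time=datetime.datetime.utcnow())   # fire now instead of waiting an hour
time.sleep(2)                                          # give the scheduler a moment to run it
sched.shutdown()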
@@ -4548,10 +4535,11 @@ class WebInterface(object):
 # logger.info(u"Directory successfully created at: " + str(com_location))
 #except OSError:
 # logger.error(u"Could not create comicdir : " + str(com_location))
-checkdirectory = filechecker.validateAndCreateDirectory(com_location, True)
-if not checkdirectory:
-logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
-return
+if mylar.CONFIG.CREATE_FOLDERS is True:
+checkdirectory = filechecker.validateAndCreateDirectory(com_location, True)
+if not checkdirectory:
+logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
+return
 
 myDB.upsert("comics", newValues, controlValueDict)
 raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
@@ -5149,6 +5137,33 @@ class WebInterface(object):
 return "Error sending test message to Slack"
 testslack.exposed = True
 
+
+def testrtorrent(self, host, username, password, auth, verify, ssl, rpc_url):
+import torrent.clients.rtorrent as TorClient
+client = TorClient.TorrentClient()
+ca_bundle = None
+if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None:
+ca_bundle = mylar.CONFIG.RTORRENT_CA_BUNDLE
+if not client.connect(host, username, password, auth, verify, ssl, rpc_url, ca_bundle):
+logger.warn('Could not establish connection to %s' % host)
+return 'Error establishing connection to Rtorrent'
+else:
+logger.info('Successfully validated connection to %s' % host)
+return "Successfully validated connection to %s" % host
+testrtorrent.exposed = True
+
+
+def testnewznab(self, name, host, ssl, apikey):
+result = helpers.newznab_test(name, host, ssl, apikey)
+
+if result == True:
+return "Successfully tested %s - valid api response received" % name
+else:
+logger.warn('Testing failed to %s [HOST:%s][SSL:%s][APIKEY:%s]' % (name, host, ssl, apikey))
+return "Error testing newznab data"
+testnewznab.exposed = True
+
+
 def orderThis(self, **kwargs):
 logger.info('here')
 return
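A companion sketch for the other new endpoint, mirroring what a config-page test would send (local instance on port 8090 and no HTTP auth assumed; indexer values are placeholders):

# Sketch only: exercise the new testnewznab endpoint directly.
import requests

params = {'name': 'MyIndexer',
          'host': 'https://indexer.example.com/api',
          'ssl': True,
          'apikey': 'YOURAPIKEY'}
print(requests.get('http://localhost:8090/testnewznab', params=params).text)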