FIX: (#2006)(#1986) Auto-Want series were not being auto-added, and even after manually added would result in count error after successful post-processing, FIX: When filechecking, if a 'special' was included as part of the annuals of a given series, it would not be detected, IMP: (#1982) When ComicRN & Completed Download Handling (CDH) are both enabled, will now check to see if ComicRN is active for the given category in the respective client. If it is, CDH will not post-process and issue a warning and let ComicRN handle the post-processing, FIX: Fixed a utf-8 encoding error when 'Run script AFTER snatch' was enabled, FIX: (#2004) Possible fix for 500 error on Wanted tab, when series had an annual/special attached but was located in the incorrect table, FIX: Removed some logging lines when CDH is monitoring downloads

This commit is contained in:
evilhero 2018-06-30 14:35:52 -04:00
parent ba59f9e85a
commit 1a71893e3e
8 changed files with 106 additions and 36 deletions

View File

@ -85,7 +85,10 @@
issueid = issue['IssueID']
except:
lineheader = None
issuenumber = issue['Issue_Number']
try:
issuenumber = issue['Issue_Number']
except:
issuenumber = issue['IssueNumber']
issueid = issue['IssueID']
if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):

View File

@ -3007,7 +3007,7 @@ def search_queue(queue):
ss_queue = mylar.search.searchforissue(item['issueid'])
time.sleep(5) #arbitrary sleep to let the process attempt to finish pp'ing
if mylar.APILOCK is True:
if mylar.SEARCHLOCK is True:
logger.fdebug('[SEARCH-QUEUE] Another item is currently being searched....')
time.sleep(15)
else:

View File

@ -1277,13 +1277,13 @@ def updateissuedata(comicid, comicname=None, issued=None, comicIssues=None, call
lastpubdate = 'Present'
publishfigure = str(SeriesYear) + ' - ' + str(lastpubdate)
else:
if calledfrom == 'weeklycheck':
if len(issuedata) >= 1 and not calledfrom == 'dbupdate':
logger.fdebug('initiating issue updating - info & status')
issue_collection(issuedata, nostatus='False')
else:
logger.fdebug('initiating issue updating - just the info')
issue_collection(issuedata, nostatus='True')
#if calledfrom == 'weeklycheck':
if len(issuedata) >= 1 and not calledfrom == 'dbupdate':
logger.fdebug('initiating issue updating - info & status')
issue_collection(issuedata, nostatus='False')
else:
logger.fdebug('initiating issue updating - just the info')
issue_collection(issuedata, nostatus='True')
styear = str(SeriesYear)

View File

@ -106,6 +106,7 @@ class NZBGet(object):
return self.historycheck(nzbinfo)
stat = False
double_pp = False
while stat is False:
time.sleep(10)
queueinfo = self.server.listgroups()
@ -114,6 +115,26 @@ class NZBGet(object):
logger.fdebug('Item is no longer in active queue. It should be finished by my calculations')
stat = True
else:
if 'comicrn' in queuedl[0]['PostInfoText'].lower():
double_pp = True
if all([len(queuedl[0]['ScriptStatuses']) > 0, double_pp is False]):
for x in queuedl[0]['ScriptStatuses']:
if 'comicrn' in x['Name'].lower():
double_pp = True
break
if all([len(queuedl[0]['Parameters']) > 0, double_pp is False]):
for x in queuedl[0]['Parameters']:
if all(['comicrn' in x['Name'].lower(), x['Value'] == 'yes']):
double_pp = True
break
if double_pp is True:
logger.warn('ComicRN has been detected as being active for this category & download. Completed Download Handling will NOT be performed due to this.')
logger.warn('Either disable Completed Download Handling for NZBGet within Mylar, or remove ComicRN from your category script in NZBGet.')
return {'status': 'double-pp', 'failed': False}
logger.fdebug('status: %s' % queuedl[0]['Status'])
logger.fdebug('name: %s' % queuedl[0]['NZBName'])
logger.fdebug('FileSize: %sMB' % queuedl[0]['FileSizeMB'])
@ -130,9 +151,27 @@ class NZBGet(object):
history = self.server.history(True)
found = False
destdir = None
double_pp = False
hq = [hs for hs in history if hs['NZBID'] == nzbid and ('SUCCESS' in hs['Status'] or 'COPY' in hs['Status'])]
if len(hq) > 0:
logger.fdebug('found matching completed item in history. Job has a status of %s' % hq[0]['Status'])
if len(hq[0]['ScriptStatuses']) > 0:
for x in hq[0]['ScriptStatuses']:
if 'comicrn' in x['Name'].lower():
double_pp = True
break
if all([len(hq[0]['Parameters']) > 0, double_pp is False]):
for x in hq[0]['Parameters']:
if all(['comicrn' in x['Name'].lower(), x['Value'] == 'yes']):
double_pp = True
break
if double_pp is True:
logger.warn('ComicRN has been detected as being active for this category & download. Completed Download Handling will NOT be performed due to this.')
logger.warn('Either disable Completed Download Handling for NZBGet within Mylar, or remove ComicRN from your category script in NZBGet.')
return {'status': 'double-pp', 'failed': False}
if all(['SUCCESS' in hq[0]['Status'], hq[0]['DownloadedSizeMB'] == hq[0]['FileSizeMB']]):
logger.fdebug('%s has final file size of %sMB' % (hq[0]['Name'], hq[0]['DownloadedSizeMB']))
if os.path.isdir(hq[0]['DestDir']):

View File

@ -76,18 +76,23 @@ class SABnzbd(object):
logger.info('successfully queried the queue for status')
try:
queueinfo = queueresponse['queue']
logger.info('queue: %s' % queueresponse)
logger.info('Queue status : %s' % queueinfo['status'])
logger.info('Queue mbleft : %s' % queueinfo['mbleft'])
#logger.fdebug('queue: %s' % queueinfo)
logger.fdebug('Queue status : %s' % queueinfo['status'])
logger.fdebug('Queue mbleft : %s' % queueinfo['mbleft'])
while any([str(queueinfo['status']) == 'Downloading', str(queueinfo['status']) == 'Idle']) and float(queueinfo['mbleft']) > 0:
logger.info('queue_params: %s' % self.params['queue'])
#if 'comicrn' in queueinfo['script'].lower():
# logger.warn('ComicRN has been detected as being active for this category & download. Completed Download Handling will NOT be performed due to this.')
# logger.warn('Either disable Completed Download Handling for SABnzbd within Mylar, or remove ComicRN from your category script in SABnzbd.')
# return {'status': 'double-pp', 'failed': False}
#logger.fdebug('queue_params: %s' % self.params['queue'])
queue_resp = requests.get(self.sab_url, params=self.params['queue'], verify=False)
queueresp = queue_resp.json()
queueinfo = queueresp['queue']
logger.info('status: %s' % queueinfo['status'])
logger.info('mbleft: %s' % queueinfo['mbleft'])
logger.info('timeleft: %s' % queueinfo['timeleft'])
logger.info('eta: %s' % queueinfo['eta'])
logger.fdebug('status: %s' % queueinfo['status'])
logger.fdebug('mbleft: %s' % queueinfo['mbleft'])
logger.fdebug('timeleft: %s' % queueinfo['timeleft'])
logger.fdebug('eta: %s' % queueinfo['eta'])
time.sleep(5)
except Exception as e:
logger.warn('error: %s' % e)
@ -111,8 +116,13 @@ class SABnzbd(object):
try:
for hq in histqueue['slots']:
#logger.info('nzo_id: %s --- %s [%s]' % (hq['nzo_id'], sendresponse, hq['status']))
if hq['nzo_id'] == sendresponse and hq['status'] == 'Completed':
if hq['nzo_id'] == sendresponse and any([hq['status'] == 'Completed', hq['status'] == 'Running', 'comicrn' in hq['script'].lower()]):
logger.info('found matching completed item in history. Job has a status of %s' % hq['status'])
if 'comicrn' in hq['script'].lower():
logger.warn('ComicRN has been detected as being active for this category & download. Completed Download Handling will NOT be performed due to this.')
logger.warn('Either disable Completed Download Handling for SABnzbd within Mylar, or remove ComicRN from your category script in SABnzbd.')
return {'status': 'double-pp', 'failed': False}
if os.path.isfile(hq['storage']):
logger.info('location found @ %s' % hq['storage'])
found = {'status': True,
@ -145,6 +155,7 @@ class SABnzbd(object):
'apicall': True}
break
break
except Exception as e:
logger.warn('error %s' % e)
break

View File

@ -1784,7 +1784,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
links = entry['link']
searchresult = searcher(nzbprov, nzbname, mylar.COMICINFO, links, IssueID, ComicID, tmpprov, newznab=newznab_host, torznab=torznab_host, rss=RSS)
if searchresult == 'downloadchk-fail':
if searchresult == 'downloadchk-fail' or searchresult == 'double-pp':
foundc['status'] = False
continue
elif searchresult == 'torrent-fail' or searchresult == 'nzbget-fail' or searchresult == 'sab-fail' or searchresult == 'blackhole-fail':
@ -2718,8 +2718,8 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if mylar.USE_NZBGET:
ss = nzbget.NZBGet()
send_to_nzbget = ss.sender(nzbpath)
if send_to_nzbget['status'] is True:
if mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True:
if mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True:
if send_to_nzbget['status'] is True:
send_to_nzbget['comicid'] = ComicID
if IssueID is not None:
send_to_nzbget['issueid'] = IssueID
@ -2727,9 +2727,11 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
send_to_nzbget['issueid'] = 'S' + IssueArcID
send_to_nzbget['apicall'] = True
mylar.NZB_QUEUE.put(send_to_nzbget)
else:
logger.warn('Unable to send nzb file to NZBGet. There was a parameter error as there are no values present: %s' % nzbget_params)
return "nzbget-fail"
elif send_to_nzbget['status'] == 'double-pp':
return send_to_nzbget['status']
else:
logger.warn('Unable to send nzb file to NZBGet. There was a parameter error as there are no values present: %s' % nzbget_params)
return "nzbget-fail"
if send_to_nzbget['status'] is True:
logger.info("Successfully sent nzb to NZBGet!")
@ -2862,6 +2864,10 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
sendtosab['apicall'] = True
logger.info('sendtosab: %s' % sendtosab)
mylar.NZB_QUEUE.put(sendtosab)
elif sendtosab['status'] == 'double-pp':
return sendtosab['status']
elif sendtosab['status'] is False:
return "sab-fail"
else:
logger.warn('Unable to send nzb file to SABnzbd. There was a parameter error as there are no values present: %s' % sab_params)
mylar.DOWNLOAD_APIKEY = None
@ -2889,7 +2895,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
'client_id': client_id,
'nzbname': nzbname,
'nzbpath': nzbpath},
'comicinfo': {'comicname': comicinfo[0]['ComicName'],
'comicinfo': {'comicname': comicinfo[0]['ComicName'].encode('utf-8'),
'volume': comicinfo[0]['ComicVolume'],
'comicid': ComicID,
'issueid': IssueID,

View File

@ -568,7 +568,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
#if ComicID[:1] == "G": mylar.importer.GCDimport(ComicID,pullupd='yes')
#else: mylar.importer.addComictoDB(ComicID,mismatch,pullupd='yes')
if 'annual' in ComicName.lower():
if any(['annual' in ComicName.lower(), 'special' in ComicName.lower()]):
myDB.upsert("annuals", values, control)
else:
myDB.upsert("issues", values, control)
@ -914,7 +914,7 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
logger.info(module + ' Updating Status (' + downstatus + ') now complete for ' + ComicName + ' issue: ' + IssueNum)
return
def forceRescan(ComicID, archive=None, module=None):
def forceRescan(ComicID, archive=None, module=None, recheck=False):
if module is None:
module = ''
module += '[FILE-RESCAN]'
@ -1068,7 +1068,7 @@ def forceRescan(ComicID, archive=None, module=None):
temploc = re.sub('[\#\']', '', temploc)
logger.fdebug(module + ' temploc: ' + temploc)
if 'annual' not in temploc.lower():
if all(['annual' not in temploc.lower(), 'special' not in temploc.lower()]):
#remove the extension here
extensions = ('.cbr', '.cbz', '.cb7')
if temploc.lower().endswith(extensions):
@ -1242,6 +1242,7 @@ def forceRescan(ComicID, archive=None, module=None):
old_status = reann['Status']
fcdigit = helpers.issuedigits(re.sub('annual', '', temploc.lower()).strip())
fcdigit = helpers.issuedigits(re.sub('special', '', temploc.lower()).strip())
if int(fcdigit) == int_iss and ANNComicID is not None:
logger.fdebug(module + ' [' + str(ANNComicID) + '] Annual match - issue : ' + str(int_iss))
@ -1364,17 +1365,17 @@ def forceRescan(ComicID, archive=None, module=None):
writeit = True
try:
if mylar.CONFIG.ANNUALS_ON:
if 'annual' in temploc.lower():
if any(['annual' in temploc.lower(), 'special' in temploc.lower()]):
if reann is None:
logger.fdebug(module + ' Annual present in location, but series does not have any annuals attached to it - Ignoring')
logger.fdebug(module + ' Annual/Special present in location, but series does not have any annuals attached to it - Ignoring')
writeit = False
else:
iss_id = reann['IssueID']
else:
iss_id = reiss['IssueID']
else:
if 'annual' in temploc.lower():
logger.fdebug(module + ' Annual support not enabled, but annual issue present within directory. Ignoring annual.')
if any(['annual' in temploc.lower(), 'special' in temploc.lower()]):
logger.fdebug(module + ' Annual support not enabled, but annual/special issue present within directory. Ignoring issue.')
writeit = False
else:
iss_id = reiss['IssueID']
@ -1550,7 +1551,7 @@ def forceRescan(ComicID, archive=None, module=None):
logger.warn(module + ' It looks like you have physical issues in the series directory, but are forcing these issues to an Archived Status. Adjusting have counts.')
havefiles = havefiles - arcfiles
thetotals = totals(ComicID, havefiles, combined_total, module)
thetotals = totals(ComicID, havefiles, combined_total, module, recheck=recheck)
totalarc = arcfiles + archivedissues
#enforce permissions
@ -1559,7 +1560,7 @@ def forceRescan(ComicID, archive=None, module=None):
filechecker.setperms(rescan['ComicLocation'])
logger.info(module + ' I have physically found ' + str(foundcount) + ' issues, ignored ' + str(ignorecount) + ' issues, snatched ' + str(snatchedcount) + ' issues, and accounted for ' + str(totalarc) + ' in an Archived state [ Total Issue Count: ' + str(havefiles) + ' / ' + str(combined_total) + ' ]')
def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None, file=None):
def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None, file=None, recheck=False):
if module is None:
module = '[FILE-RESCAN]'
myDB = db.DBConnection()
@ -1578,7 +1579,17 @@ def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None,
logger.fdebug('totalfiles: %s' % totalfiles)
logger.fdebug('status: %s' % hf['IssStatus'])
if hf['IssStatus'] != 'Downloaded':
havefiles = int(hf['Have']) +1
try:
havefiles = int(hf['Have']) +1
if havefiles > totalfiles and recheck is False:
recheck = True
return forceRescan(ComicID, recheck=recheck)
except TypeError:
if totalfiles == 1:
havefiles = 1
else:
logger.warn('Total issues for this series [ComiciD:%s/IssueID:%] is not 1 when it should be. This is probably a mistake and the series should be refreshed.' % (ComicID, IssueID))
havefiles = 0
logger.fdebug('incremented havefiles: %s' % havefiles)
else:
havefiles = int(hf['Have'])

View File

@ -1909,6 +1909,7 @@ class WebInterface(object):
myDB.action('DELETE FROM weekly WHERE weeknumber=? and year=?', [int(weeknumber), int(year)])
logger.info("Deleted existing pull-list data for week %s, %s. Now Recreating the Pull-list..." % (weeknumber, year))
weeklypull.pullit(forcecheck, weeknumber, year)
weeklypull.future_check()
pullrecreate.exposed = True
def upcoming(self):
@ -2107,7 +2108,6 @@ class WebInterface(object):
logger.fdebug('[DELETE] - ' + mvup['ComicName'] + ' issue #: ' + str(mvup['IssueNumber']))
deleteit = myDB.action("DELETE from upcoming WHERE ComicName=? AND IssueNumber=?", [mvup['ComicName'], mvup['IssueNumber']])
return serve_template(templatename="upcoming.html", title="Upcoming", upcoming=upcoming, issues=issues, ann_list=ann_list, futureupcoming=futureupcoming, future_nodata_upcoming=future_nodata_upcoming, futureupcoming_count=futureupcoming_count, upcoming_count=upcoming_count, wantedcount=wantedcount, isCounts=isCounts)
upcoming.exposed = True