FIX:(#341) Checks in PostProcessor for incorrect API key, FIX:(#385) improved error verbosity when unable to find original filename, IMP: History will now show Downloaded Issues

This commit is contained in:
evilhero 2013-05-15 05:04:26 -04:00
parent 29a8f167aa
commit 342b9daa30
3 changed files with 80 additions and 39 deletions

View File

@ -156,8 +156,13 @@ class PostProcessor(object):
data = file.read()
file.close()
dom = parseString(data)
sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
try:
sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
except:
errorm = dom.getElementsByTagName('error')[0].firstChild.wholeText
logger.error(u"Error detected attempting to retrieve SAB data : " + errorm)
return
sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText
logger.fdebug("SAB Replace Spaces: " + str(sabreps))
logger.fdebug("SAB Replace Dots: " + str(sabrepd))
@ -409,11 +414,17 @@ class PostProcessor(object):
'$VolumeN': comversion
}
ofilename = None
for root, dirnames, filenames in os.walk(self.nzb_folder):
for filename in filenames:
if filename.lower().endswith(extensions):
ofilename = filename
path, ext = os.path.splitext(ofilename)
if ofilename is None:
logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
return
self._log("Original Filename: " + ofilename, logger.DEBUG)
self._log("Original Extension: " + ext, logger.DEBUG)
logger.fdebug("Original Filname: " + str(ofilename))
@ -468,6 +479,8 @@ class PostProcessor(object):
self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
#delete entry from nzblog table
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
#update snatched table to change status to Downloaded
updater.foundsearch(comicid, issueid, down='True')
#force rescan of files
updater.forceRescan(comicid)
logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG) )

View File

@ -140,7 +140,8 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
#let's do the Annual check here.
if mylar.ANNUALS_ON:
annuals = comicbookdb.cbdb(comic['ComicName'], SeriesYear)
annualcomicname = re.sub('[\,\:]', '', comic['ComicName'])
annuals = comicbookdb.cbdb(annualcomicname, SeriesYear)
print ("Number of Annuals returned: " + str(annuals['totalissues']))
nb = 0
while (nb <= int(annuals['totalissues'])):
@ -267,6 +268,12 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
else:
comicVol = None
#for description ...
#Cdesc = helpers.cleanhtml(comic['ComicDescription'])
#cdes_find = Cdesc.find("Collected")
#cdes_removed = Cdesc[:cdes_find]
#print cdes_removed
controlValueDict = {"ComicID": comicid}
newValueDict = {"ComicName": comic['ComicName'],
"ComicSortName": sortname,
@ -276,6 +283,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
"ComicVersion": comicVol,
"ComicLocation": comlocation,
"ComicPublisher": comic['ComicPublisher'],
#"Description": Cdesc.decode('utf-8', 'replace'),
"DetailURL": comic['ComicURL'],
# "ComicPublished": gcdinfo['resultPublished'],
"ComicPublished": 'Unknown',
@ -530,15 +538,17 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
"IssueDate": issdate,
"Int_IssueNumber": int_issnum
}
if mylar.AUTOWANT_ALL:
newValueDict['Status'] = "Wanted"
elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
newValueDict['Status'] = "Wanted"
else:
newValueDict['Status'] = "Skipped"
if iss_exists:
#print ("Existing status : " + str(iss_exists['Status']))
newValueDict['Status'] = iss_exists['Status']
else:
if mylar.AUTOWANT_ALL:
newValueDict['Status'] = "Wanted"
elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
newValueDict['Status'] = "Wanted"
else:
newValueDict['Status'] = "Skipped"
try:
myDB.upsert("issues", newValueDict, controlValueDict)
@ -612,20 +622,26 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
# lets' check the pullist for anything at this time as well since we're here.
# do this for only Present comics....
if mylar.AUTOWANT_UPCOMING and lastpubdate == 'Present': #and 'Present' in gcdinfo['resultPublished']:
logger.info(u"Checking this week's pullist for new issues of " + comic['ComicName'])
updater.newpullcheck(comic['ComicName'], comicid)
print ("latestissue: #" + str(latestiss))
chkstats = myDB.action("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
print chkstats['Status']
if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted' or chkstats['Status'] == 'Snatched':
logger.info(u"Checking this week's pullist for new issues of " + comic['ComicName'])
updater.newpullcheck(comic['ComicName'], comicid)
#here we grab issues that have been marked as wanted above...
#here we grab issues that have been marked as wanted above...
results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
if results:
logger.info(u"Attempting to grab wanted issues for : " + comic['ComicName'])
results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
if results:
logger.info(u"Attempting to grab wanted issues for : " + comic['ComicName'])
for result in results:
search.searchforissue(result['IssueID'])
else: logger.info(u"No issues marked as wanted for " + comic['ComicName'])
for result in results:
search.searchforissue(result['IssueID'])
else: logger.info(u"No issues marked as wanted for " + comic['ComicName'])
logger.info(u"Finished grabbing what I could.")
logger.info(u"Finished grabbing what I could.")
else:
logger.info(u"Already have the latest issue : #" + str(latestiss))
def GCDimport(gcomicid, pullupd=None,imported=None,ogcname=None):

View File

@ -244,7 +244,7 @@ def nzblog(IssueID, NZBName):
#print newValue
myDB.upsert("nzblog", newValue, controlValue)
def foundsearch(ComicID, IssueID):
def foundsearch(ComicID, IssueID, down=None):
# When doing a Force Search (Wanted tab), the resulting search calls this to update.
# this is all redundant code that forceRescan already does.
@ -258,24 +258,36 @@ def foundsearch(ComicID, IssueID):
issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
CYear = issue['IssueDate'][:4]
# update the status to Snatched (so it won't keep on re-downloading!)
logger.fdebug("updating status to snatched")
controlValue = {"IssueID": IssueID}
newValue = {"Status": "Snatched"}
myDB.upsert("issues", newValue, controlValue)
# update the snatched DB
controlValueDict = {"IssueID": IssueID}
newValueDict = {"Status": "Snatched"}
logger.fdebug("updating snatched db.")
myDB.upsert("issues", newValueDict, controlValueDict)
snatchedupdate = {"IssueID": IssueID}
newsnatchValues = {"ComicName": comic['ComicName'],
"ComicID": ComicID,
"Issue_Number": issue['Issue_Number'],
"DateAdded": helpers.now(),
"Status": "Snatched"
}
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
if down is None:
# update the status to Snatched (so it won't keep on re-downloading!)
logger.fdebug("updating status to snatched")
controlValue = {"IssueID": IssueID}
newValue = {"Status": "Snatched"}
myDB.upsert("issues", newValue, controlValue)
# update the snatched DB
snatchedupdate = {"IssueID": IssueID,
"Status": "Snatched"
}
newsnatchValues = {"ComicName": comic['ComicName'],
"ComicID": ComicID,
"Issue_Number": issue['Issue_Number'],
"DateAdded": helpers.now(),
"Status": "Snatched"
}
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
else:
snatchedupdate = {"IssueID": IssueID,
"Status": "Downloaded"
}
newsnatchValues = {"ComicName": comic['ComicName'],
"ComicID": ComicID,
"Issue_Number": issue['Issue_Number'],
"DateAdded": helpers.now(),
"Status": "Downloaded"
}
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
#print ("finished updating snatched db.")
logger.info(u"Updating now complete for " + comic['ComicName'] + " issue: " + str(issue['Issue_Number']))