IMP: Added some extra handling for series on the weekly pull-list that have totals out of whack and were causing problems when file-checking new issues, FIX:(#794) Can't change annual status using checkboxes, IMP: When trying to view the details for an issue, if it was a cbz file that contained no metadata it would error out

This commit is contained in:
evilhero 2014-08-18 15:54:34 -04:00
parent 573242624c
commit e7216072f9
5 changed files with 84 additions and 58 deletions

View File

@ -436,9 +436,9 @@
${aninfo['annualComicName']}<a href="annualDelete?comicid=${comic['ComicID']}&ReleaseComicID=${aninfo['annualComicID']}"><img src="interfaces/default/images/x.png" height="10" width="10"/></a>
%endfor
<form action="markissues" method="get" id="markissues">
<div id="markissue">Mark selected annuals as
<select name="action" onChange="doAjaxCall('markissues',$(this),'table',true);" data-success="selected issues marked">
<form action="markannuals" method="get" id="markannuals">
<div id="markannuals">Mark selected annuals as
<select name="ann_action" form="markannuals" onChange="doAjaxCall('markannuals',$(this),'table',true);" data-success="selected issues marked">
<option disabled="disabled" selected="selected">Choose...</option>
<option value="Wanted">Wanted</option>
<option value="Skipped">Skipped</option>
@ -446,7 +446,6 @@
<option value="Archived">Archived</option>
<option value="Ignored">Ignored</option>
</select>
selected annuals
<input type="hidden" value="Go">
</div>
@ -465,7 +464,7 @@
-->
<thead>
<tr>
<th id="aselect" align="left"><input type="checkbox" onClick="toggle(this)" class="checkbox" /></th>
<th id="select" align="left"><input type="checkbox" onClick="toggle(this)" class="checkbox" /></th>
<th id="aint_issuenumber">Int_IssNumber</th>
<th id="aissuenumber">Number</th>
<th id="aissuename">Name</th>
@ -493,7 +492,7 @@
grade = 'A'
%>
<tr class="${annual['Status']} grade${grade}">
<td id="aselect"><input type="checkbox" name="${annual['IssueID']}" class="checkbox" value="${annual['IssueID']}" /></td>
<td id="select"><input type="checkbox" name="${annual['IssueID']}" class="checkbox" value="${annual['IssueID']}" /></td>
<%
if annual['Int_IssueNumber'] is None:
annual_Number = annual['Int_IssueNumber']

View File

@ -1160,34 +1160,33 @@ def IssueDetails(filelocation, IssueID=None):
issuedetails = []
if filelocation.endswith('.cbz'):
logger.info('CBZ file detected. Checking for .xml within file')
logger.fdebug('CBZ file detected. Checking for .xml within file')
shutil.copy( filelocation, dstlocation )
else:
logger.info('filename is not a cbz : ' + filelocation)
logger.fdebug('filename is not a cbz : ' + filelocation)
return
cover = "notfound"
issuetag = None
modtime = os.path.getmtime(dstlocation)
logger.info('file modtime set to : ' + str(modtime))
with zipfile.ZipFile(dstlocation, 'r') as inzipfile:
for infile in inzipfile.namelist():
if infile == 'ComicInfo.xml':
logger.info('Extracting ComicInfo.xml to display.')
logger.fdebug('Extracting ComicInfo.xml to display.')
dst = os.path.join(mylar.CACHE_DIR, 'ComicInfo.xml')
data = inzipfile.read(infile)
print str(data)
issuetag = 'xml'
elif '000.jpg' in infile or '000.png' in infile or '00.jpg' in infile:
logger.info('Extracting primary image ' + infile + ' as coverfile for display.')
logger.fdebug('Extracting primary image ' + infile + ' as coverfile for display.')
local_file = open(os.path.join(mylar.CACHE_DIR,'temp.jpg'), "wb")
local_file.write(inzipfile.read(infile))
local_file.close
cover = "found"
elif ('001.jpg' in infile or '001.png' in infile) and cover == "notfound":
logger.info('Extracting primary image ' + infile + ' as coverfile for display.')
logger.fdebug('Extracting primary image ' + infile + ' as coverfile for display.')
local_file = open(os.path.join(mylar.CACHE_DIR,'temp.jpg'), "wb")
local_file.write(inzipfile.read(infile))
local_file.close
@ -1196,6 +1195,7 @@ def IssueDetails(filelocation, IssueID=None):
ComicImage = os.path.join('cache', 'temp.jpg?'+str(modtime))
IssueImage = replacetheslash(ComicImage)
if issuetag is None:
import subprocess
from subprocess import CalledProcessError, check_output
@ -1207,8 +1207,9 @@ def IssueDetails(filelocation, IssueID=None):
issuetag = 'comment'
except CalledProcessError as e:
logger.warn('Unable to extract comment field from zipfile.')
#logger.info('data:' + str(data))
print 'data:' + str(data)
if issuetag == 'xml':
#import easy to use xml parser called minidom:
dom = parseString(data)
@ -1292,25 +1293,27 @@ def IssueDetails(filelocation, IssueID=None):
pagecount = result.getElementsByTagName('PageCount')[0].firstChild.wholeText
except:
pagecount = 0
logger.info("number of pages I counted: " + str(pagecount))
logger.fdebug("number of pages I counted: " + str(pagecount))
i = 0
while (i < int(pagecount)):
pageinfo = result.getElementsByTagName('Page')[i].attributes
attrib = pageinfo.getNamedItem('Image')
logger.info('Frontcover validated as being image #: ' + str(attrib.value))
logger.fdebug('Frontcover validated as being image #: ' + str(attrib.value))
att = pageinfo.getNamedItem('Type')
logger.info('pageinfo: ' + str(pageinfo))
logger.fdebug('pageinfo: ' + str(pageinfo))
if att.value == 'FrontCover':
logger.info('FrontCover detected. Extracting.')
logger.fdebug('FrontCover detected. Extracting.')
break
i+=1
else:
stripline = 'Archive: ' + dstlocation
data = re.sub(stripline, '', data.encode("utf-8"))
data = re.sub(stripline, '', data.encode("utf-8")).strip()
if data is None or data == '':
return
import ast
ast_data = ast.literal_eval(str(data))
lastmodified = ast_data['lastModified']
print lastmodified
dt = ast_data['ComicBookInfo/1.0']
publisher = dt['publisher']
year = dt['publicationYear']

View File

@ -960,12 +960,13 @@ def issue_collection(issuedata,nostatus):
# Only change the status & add DateAdded if the issue is already in the database
if iss_exists is None:
newValueDict['DateAdded'] = helpers.today()
datechk = re.sub('-','', issue['ReleaseDate']).strip() # converts date to 20140718 format
#logger.fdebug('issue #' + str(issue['Issue_Number']) + 'does not exist in db.')
if mylar.AUTOWANT_ALL:
newValueDict['Status'] = "Wanted"
#logger.fdebug('autowant all')
elif re.sub('-', '', issue['ReleaseDate']).strip() > nowtime and mylar.AUTOWANT_UPCOMING:
#logger.fdebug(str(re.sub('-', '', issue['ReleaseDate']).strip()) + ' > ' + str(nowtime))
elif int(datechk) >= int(nowtime) and mylar.AUTOWANT_UPCOMING:
#logger.fdebug(str(datechk) + ' >= ' + str(nowtime))
newValueDict['Status'] = "Wanted"
else:
newValueDict['Status'] = "Skipped"

View File

@ -25,7 +25,7 @@ import itertools
import mylar
from mylar import db, logger, helpers, filechecker
def dbUpdate(ComicIDList=None):
def dbUpdate(ComicIDList=None, calledfrom=None):
myDB = db.DBConnection()
#print "comicidlist:" + str(ComicIDList)
@ -34,7 +34,8 @@ def dbUpdate(ComicIDList=None):
else:
comiclist = ComicIDList
logger.info('Starting update for %i active comics' % len(comiclist))
if calledfrom is None:
logger.info('Starting update for %i active comics' % len(comiclist))
for comic in comiclist:
if ComicIDList is None:
@ -62,13 +63,20 @@ def dbUpdate(ComicIDList=None):
if not issues:
#if issues are None it's probably a bad refresh/maxed out API that resulted in the issue data
#getting wiped out and not refreshed. Setting whack=True will force a complete refresh.
logger.info('No issue data available. This is Whack.')
logger.fdebug('No issue data available. This is Whack.')
whack = True
else:
#check for series that are numerically out of whack (ie. 5/4)
logger.info('Checking how out of whack the series is.')
logger.fdebug('Checking how out of whack the series is.')
whack = helpers.havetotals(refreshit=ComicID)
if calledfrom == 'weekly':
if whack == True:
logger.info('Series is out of whack. Forcibly refreshing series to ensure everything is in order.')
return True
else:
return False
annload = [] #initiate the list here so we don't error out below.
if mylar.ANNUALS_ON:
@ -184,7 +192,8 @@ def dbUpdate(ComicIDList=None):
myDB.upsert("Issues", newVAL, ctrlVAL)
logger.info('I have added ' + str(len(newiss)) + ' new issues for this series that were not present before.')
forceRescan(ComicID)
else:
mylar.importer.addComictoDB(ComicID,mismatch,annload=annload)
@ -292,7 +301,15 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
if issuechk is not None:
if issuechk['Issue_Number'] == IssueNumber or issuechk['Issue_Number'] == altissuenumber:
logger.fdebug('Comic series already up-to-date ... no need to refresh at this time.')
#check for 'out-of-whack' series here.
whackness = dbUpdate([ComicID], calledfrom='weekly')
if whackness == True:
logger.fdebug('Comic series has an incorrect total count. Forcily refreshing series to ensure data is current.')
dbUpdate([ComicID])
issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [ComicID, helpers.issuedigits(IssueNumber)]).fetchone()
else:
logger.fdebug('Comic series already up-to-date ... no need to refresh at this time.')
logger.fdebug('Available to be marked for download - checking...' + adjComicName + ' Issue: ' + str(issuechk['Issue_Number']))
logger.fdebug('...Existing status: ' + str(issuechk['Status']))
control = {"IssueID": issuechk['IssueID']}

View File

@ -731,6 +731,10 @@ class WebInterface(object):
force_rss.exposed = True
def markannuals(self, ann_action=None, **args):
# Thin wrapper so the annuals form (field name 'ann_action') can reuse the
# markissues status-change logic; args carries the checked IssueID checkboxes.
self.markissues(action=ann_action, **args)
# Expose as a CherryPy endpoint (required for the markannuals form action).
markannuals.exposed = True
def markissues(self, action=None, **args):
myDB = db.DBConnection()
issuesToAdd = []
@ -740,7 +744,7 @@ class WebInterface(object):
else:
newaction = action
for IssueID in args:
if IssueID is None or 'issue_table' in IssueID or 'history_table' in IssueID or 'manage_issues' in IssueID:
if IssueID is None or 'issue_table' in IssueID or 'history_table' in IssueID or 'manage_issues' in IssueID or 'issue_table_length' in IssueID:
continue
else:
mi = myDB.selectone("SELECT * FROM issues WHERE IssueID=?",[IssueID]).fetchone()
@ -752,25 +756,26 @@ class WebInterface(object):
annchk = 'yes'
else:
comicname = mi['ComicName']
miyr = myDB.selectone("SELECT ComicYear FROM comics WHERE ComicID=?", [mi['ComicID']]).fetchone()
if action == 'Downloaded':
if mi['Status'] == "Skipped" or mi['Status'] == "Wanted":
logger.info(u"Cannot change status to %s as comic is not Snatched or Downloaded" % (newaction))
logger.fdebug(u"Cannot change status to %s as comic is not Snatched or Downloaded" % (newaction))
continue
elif action == 'Archived':
logger.info(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
logger.fdebug(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
#updater.forceRescan(mi['ComicID'])
issuestoArchive.append(IssueID)
elif action == 'Wanted' or action == 'Retry':
if action == 'Retry': newaction = 'Wanted'
logger.info(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
logger.fdebug(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
issuesToAdd.append(IssueID)
elif action == 'Skipped':
logger.info(u"Marking " + str(IssueID) + " as Skipped")
logger.fdebug(u"Marking " + str(IssueID) + " as Skipped")
elif action == 'Clear':
myDB.action("DELETE FROM snatched WHERE IssueID=?", [IssueID])
elif action == 'Failed' and mylar.FAILED_DOWNLOAD_HANDLING:
logger.info('Marking [' + comicname + '] : ' + str(IssueID) + ' as Failed. Sending to failed download handler.')
logger.fdebug('Marking [' + comicname + '] : ' + str(IssueID) + ' as Failed. Sending to failed download handler.')
failedcomicid = mi['ComicID']
failedissueid = IssueID
break
@ -786,7 +791,7 @@ class WebInterface(object):
if len(issuestoArchive) > 0:
updater.forceRescan(mi['ComicID'])
if len(issuesToAdd) > 0:
logger.debug("Marking issues: %s as Wanted" % (issuesToAdd))
logger.fdebug("Marking issues: %s as Wanted" % (issuesToAdd))
threading.Thread(target=search.searchIssueIDList, args=[issuesToAdd]).start()
#if IssueID:
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % mi['ComicID'])
@ -1887,30 +1892,30 @@ class WebInterface(object):
"IssueID": Arc_MS['IssueID']})
mode='series'
if yearRANGE is None:
sresults, explicit = mb.findComic(comicname, mode, issue=numissues, explicit='all')
else:
sresults, explicit = mb.findComic(comicname, mode, issue=numissues, limityear=yearRANGE, explicit='all')
type='comic'
# mode='series'
# if yearRANGE is None:
# sresults, explicit = mb.findComic(comicname, mode, issue=numissues, explicit='all')
# else:
# sresults, explicit = mb.findComic(comicname, mode, issue=numissues, limityear=yearRANGE, explicit='all')
# type='comic'
if len(sresults) == 1:
sr = sresults[0]
implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
resultset = 1
#need to move the files here.
elif len(sresults) == 0 or len(sresults) is None:
implog = implog + "no results, removing the year from the agenda and re-querying.\n"
sresults, explicit = mb.findComic(ogcname, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
if len(sresults) == 1:
sr = sresults[0]
implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
resultset = 1
else:
resultset = 0
else:
implog = implog + "returning results to screen - more than one possibility.\n"
resultset = 0
# if len(sresults) == 1:
# sr = sresults[0]
# implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
# resultset = 1
# #need to move the files here.
# elif len(sresults) == 0 or len(sresults) is None:
# implog = implog + "no results, removing the year from the agenda and re-querying.\n"
# sresults, explicit = mb.findComic(ogcname, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
# if len(sresults) == 1:
# sr = sresults[0]
# implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
# resultset = 1
# else:
# resultset = 0
# else:
# implog = implog + "returning results to screen - more than one possibility.\n"
# resultset = 0
@ -3433,7 +3438,8 @@ class WebInterface(object):
logger.warn('No issues physically exist within the series directory for me to (re)-tag.')
return
for ginfo in groupinfo:
self.manual_metatag(dirName, ginfo['IssueID'], os.path.join(dirName, ginfo['Location']))
logger.info('tagging : ' + str(ginfo))
self.manual_metatag(dirName, ginfo['IssueID'], os.path.join(dirName, ginfo['Location']), ComicID)
logger.info('Finished doing a complete series (re)tagging of metadata.')
group_metatag.exposed = True