mirror of https://github.com/evilhero/mylar
FIX: Hopefully addressed the issue where re-downloads of the pull-list occur after they've already been downloaded, FIX: Having duplicate issues within the series directory will now be handled properly based entirely on duplication settings in the GUI, FIX: Fixed a small bug when adding a series, would not populate the pull-list when the series was on said list, IMP: Removed Downloaded status as an available option to change issues to on the comic details page, FIX: Formatted multiple series directories on the comic details page a bit better, IMP: Added different css colouring for Downloaded status on the weekly pull list, FIX: Annuals should now be appearing on the Manage Issues tab
This commit is contained in:
parent
f31f279add
commit
a692cfb745
|
@ -146,11 +146,11 @@
|
|||
archive_path = 'None'
|
||||
%>
|
||||
%if os.path.exists(archive_path) and archive_path != comic['ComicLocation']:
|
||||
<label><big>Directories</big><br/>
|
||||
<norm>Primary: ${comic['ComicLocation']}</norm></label>
|
||||
<norm>Secondary: ${archive_path}</norm></label>
|
||||
<label><big>Directories:</big></label><br/>
|
||||
<p class="tab"><norm>Primary: ${comic['ComicLocation']}</norm><br/>
|
||||
<norm>Secondary: ${archive_path}</norm></label></p>
|
||||
%else:
|
||||
<label><big>Directory</big><br/>
|
||||
<label><big>Directory:</big><br/>
|
||||
<norm>${comic['ComicLocation']}</norm></label>
|
||||
%endif
|
||||
%else:
|
||||
|
@ -343,7 +343,6 @@
|
|||
<option disabled="disabled" selected="selected">Choose...</option>
|
||||
<option value="Wanted">Wanted</option>
|
||||
<option value="Skipped">Skipped</option>
|
||||
<option value="Downloaded">Downloaded</option>
|
||||
<option value="Archived">Archived</option>
|
||||
<option value="Ignored">Ignored</option>
|
||||
%if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
|
|
|
@ -263,6 +263,21 @@ table.display tr.even.gradeU {
|
|||
background-color: #eee;
|
||||
}
|
||||
|
||||
table.display tr.odd.gradeP {
|
||||
background-color: #68FC68;
|
||||
}
|
||||
|
||||
table.display tr.even.gradeP {
|
||||
background-color: #68FC68;
|
||||
}
|
||||
|
||||
table.display tr.odd.gradeD {
|
||||
background-color: #C7EFC7;
|
||||
}
|
||||
|
||||
table.display tr.even.gradeD {
|
||||
background-color: #C7EFC7;
|
||||
}
|
||||
|
||||
table.display tr.odd.gradeZ {
|
||||
background-color: #FAFAFA;
|
||||
|
@ -280,6 +295,8 @@ table.display tr.gradeA td,
|
|||
table.display tr.gradeC td,
|
||||
table.display tr.gradeX td,
|
||||
table.display tr.gradeU td,
|
||||
table.display tr.gradeP td,
|
||||
table.display tr.gradeD td,
|
||||
table.display tr.gradeZ td {border-bottom: 1px solid #FFF;}
|
||||
table.display tr:last-child td {
|
||||
border-bottom: 1px solid #eee;
|
||||
|
|
|
@ -143,6 +143,9 @@ h3 {
|
|||
p.center {
|
||||
text-align: center;
|
||||
}
|
||||
p.tab {
|
||||
margin-left: 40px;
|
||||
}
|
||||
hr {
|
||||
border: 0;
|
||||
border-top: 1px solid #cccccc;
|
||||
|
|
|
@ -67,7 +67,20 @@
|
|||
%for issue in issues:
|
||||
<tr>
|
||||
<td id="select"><input type="checkbox" name="${issue['IssueID']}" class="checkbox" /></td>
|
||||
<td id="name"><span title="${issue['ComicName']}"></span><a href="comicDetails?ComicID=${issue['ComicID']}">${issue['ComicName']}</a></td>
|
||||
<%
|
||||
try:
|
||||
if issue['ReleaseComicName']:
|
||||
annual = True
|
||||
else:
|
||||
annual = False
|
||||
except:
|
||||
annual = False
|
||||
%>
|
||||
%if annual:
|
||||
<td id="name"><span title="${issue['ReleaseComicName']}"></span><a href="comicDetails?ComicID=${issue['ComicID']}">${issue['ReleaseComicName']}</a></td>
|
||||
%else:
|
||||
<td id="name"><span title="${issue['ComicName']}"></span><a href="comicDetails?ComicID=${issue['ComicID']}">${issue['ComicName']}</a></td>
|
||||
%endif
|
||||
<td id="int_issue">${issue['Int_IssueNumber']}</td>
|
||||
<td id="issue">${issue['Issue_Number']}</td>
|
||||
<td id="pubdate">${issue['IssueDate']}</td>
|
||||
|
|
|
@ -61,6 +61,8 @@
|
|||
grade = 'X'
|
||||
elif weekly['STATUS'] == 'Snatched':
|
||||
grade = 'C'
|
||||
elif weekly['STATUS'] == 'Downloaded':
|
||||
grade = 'D'
|
||||
else:
|
||||
grade = 'A'
|
||||
if weekly['AUTOWANT'] == True:
|
||||
|
|
|
@ -527,6 +527,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
|
|||
latestiss = issuedata['LatestIssue']
|
||||
latestdate = issuedata['LatestDate']
|
||||
lastpubdate = issuedata['LastPubDate']
|
||||
series_status = issuedata['SeriesStatus']
|
||||
#move the files...if imported is not empty & not futurecheck (meaning it's not from the mass importer.)
|
||||
if imported is None or imported == 'None' or imported == 'futurecheck':
|
||||
pass
|
||||
|
@ -545,36 +546,60 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
|
|||
statafter = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
|
||||
logger.fdebug('issue: ' + str(latestiss) + ' status after chk :' + str(statafter['Status']))
|
||||
|
||||
logger.fdebug('pullupd: ' + str(pullupd))
|
||||
logger.fdebug('lastpubdate: ' + str(lastpubdate))
|
||||
logger.fdebug('series_status: ' + str(series_status))
|
||||
if pullupd is None:
|
||||
# lets' check the pullist for anything at this time as well since we're here.
|
||||
# do this for only Present comics....
|
||||
if mylar.AUTOWANT_UPCOMING and lastpubdate == 'Present' and series_status == 'Active': #and 'Present' in gcdinfo['resultPublished']:
|
||||
logger.fdebug('latestissue: #' + str(latestiss))
|
||||
chkstats = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
|
||||
logger.fdebug('latestissue status: ' + chkstats['Status'])
|
||||
if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted' or chkstats['Status'] == 'Snatched':
|
||||
logger.info('Checking this week pullist for new issues of ' + comic['ComicName'])
|
||||
if comic['ComicName'] != comicname_filesafe:
|
||||
cn_pull = comicname_filesafe
|
||||
else:
|
||||
cn_pull = comic['ComicName']
|
||||
updater.newpullcheck(cn_pull, comicid, issue=latestiss)
|
||||
if chkstats is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
chkstats = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? AND Issue_Number=?", [comicid, latestiss]).fetchone()
|
||||
|
||||
#here we grab issues that have been marked as wanted above...
|
||||
|
||||
results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
|
||||
if results:
|
||||
logger.info('Attempting to grab wanted issues for : ' + comic['ComicName'])
|
||||
|
||||
for result in results:
|
||||
logger.fdebug('Searching for : ' + str(result['Issue_Number']))
|
||||
logger.fdebug('Status of : ' + str(result['Status']))
|
||||
search.searchforissue(result['IssueID'])
|
||||
else: logger.info('No issues marked as wanted for ' + comic['ComicName'])
|
||||
if chkstats:
|
||||
logger.fdebug('latestissue status: ' + chkstats['Status'])
|
||||
if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted' or chkstats['Status'] == 'Snatched':
|
||||
logger.info('Checking this week pullist for new issues of ' + comic['ComicName'])
|
||||
if comic['ComicName'] != comicname_filesafe:
|
||||
cn_pull = comicname_filesafe
|
||||
else:
|
||||
cn_pull = comic['ComicName']
|
||||
updater.newpullcheck(ComicName=cn_pull,ComicID=comicid,issue=latestiss)
|
||||
|
||||
logger.info('Finished grabbing what I could.')
|
||||
else:
|
||||
logger.info('Already have the latest issue : #' + str(latestiss))
|
||||
#here we grab issues that have been marked as wanted above...
|
||||
results = []
|
||||
issresults = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
|
||||
if issresults:
|
||||
for issr in issresults:
|
||||
results.append({'IssueID': issr['IssueID'],
|
||||
'Issue_Number': issr['Issue_Number'],
|
||||
'Status': issr['Status']
|
||||
})
|
||||
if mylar.ANNUALS_ON:
|
||||
an_results = myDB.select("SELECT * FROM annuals WHERE ComicID=? AND Status='Wanted'", [comicid])
|
||||
if an_results:
|
||||
for ar in an_results:
|
||||
results.append({'IssueID': ar['IssueID'],
|
||||
'Issue_Number': ar['Issue_Number'],
|
||||
'Status': ar['Status']
|
||||
})
|
||||
|
||||
|
||||
if results:
|
||||
logger.info('Attempting to grab wanted issues for : ' + comic['ComicName'])
|
||||
|
||||
for result in results:
|
||||
logger.fdebug('Searching for : ' + str(result['Issue_Number']))
|
||||
logger.fdebug('Status of : ' + str(result['Status']))
|
||||
search.searchforissue(result['IssueID'])
|
||||
else: logger.info('No issues marked as wanted for ' + comic['ComicName'])
|
||||
|
||||
logger.info('Finished grabbing what I could.')
|
||||
else:
|
||||
logger.info('Already have the latest issue : #' + str(latestiss))
|
||||
|
||||
if chkwant is not None:
|
||||
#if this isn't None, this is being called from the futureupcoming list
|
||||
|
@ -1350,6 +1375,7 @@ def updateissuedata(comicid, comicname=None, issued=None, comicIssues=None, call
|
|||
importantdates['LatestIssue'] = latestiss
|
||||
importantdates['LatestDate'] = latestdate
|
||||
importantdates['LastPubDate'] = lastpubdate
|
||||
importantdates['SeriesStatus'] = 'Active'
|
||||
|
||||
if calledfrom == 'weekly':
|
||||
return weeklyissue_check
|
||||
|
|
|
@ -264,6 +264,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [ComicID, helpers.issuedigits(altissuenumber)]).fetchone()
|
||||
if issuechk is None:
|
||||
if futurepull is None:
|
||||
og_status = None
|
||||
logger.fdebug(adjComicName + ' Issue: ' + str(IssueNumber) + ' not present in listings to mark for download...updating comic and adding to Upcoming Wanted Releases.')
|
||||
# we need to either decrease the total issue count, OR indicate that an issue is upcoming.
|
||||
upco_results = myDB.select("SELECT COUNT(*) FROM UPCOMING WHERE ComicID=?",[ComicID])
|
||||
|
@ -298,36 +299,53 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
nKey = {"ComicID": ComicID}
|
||||
nVal = {"Status": "Wanted"}
|
||||
myDB.upsert("future", nVal, nKey)
|
||||
return
|
||||
|
||||
if issuechk is not None:
|
||||
if issuechk['Issue_Number'] == IssueNumber or issuechk['Issue_Number'] == altissuenumber:
|
||||
og_status = issuechk['Status']
|
||||
#check for 'out-of-whack' series here.
|
||||
whackness = dbUpdate([ComicID], calledfrom='weekly')
|
||||
if whackness == True:
|
||||
if any( [issuechk['Status'] == 'Downloaded', issuechk['Status'] == 'Archived', issuechk['Status'] == 'Snatched'] ):
|
||||
logger.fdebug('Forcibly maintaining status of : ' + og_status + ' for #' + issuechk['Issue_Number'] + ' to ensure integrity.')
|
||||
logger.fdebug('Comic series has an incorrect total count. Forcily refreshing series to ensure data is current.')
|
||||
dbUpdate([ComicID])
|
||||
issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [ComicID, helpers.issuedigits(IssueNumber)]).fetchone()
|
||||
if issuechk['Status'] != og_status and (issuechk['Status'] != 'Downloaded' or issuechk['Status'] != 'Archived' or issuechk['Status'] != 'Snatched'):
|
||||
logger.fdebug('Forcibly changing status of ' + issuechk['Status'] + ' back to ' + og_status + ' for #' + issuechk['Issue_Number'] + ' to stop repeated downloads.')
|
||||
else:
|
||||
logger.fdebug('[' + issuechk['Status'] + '] / [' + og_status + '] Status has not changed during refresh or is marked as being Wanted/Skipped correctly.')
|
||||
og_status = issuechk['Status']
|
||||
else:
|
||||
logger.fdebug('Comic series already up-to-date ... no need to refresh at this time.')
|
||||
|
||||
logger.fdebug('Available to be marked for download - checking...' + adjComicName + ' Issue: ' + str(issuechk['Issue_Number']))
|
||||
logger.fdebug('...Existing status: ' + str(issuechk['Status']))
|
||||
logger.fdebug('...Existing status: ' + og_status)
|
||||
control = {"IssueID": issuechk['IssueID']}
|
||||
newValue['IssueID'] = issuechk['IssueID']
|
||||
if issuechk['Status'] == "Snatched":
|
||||
if og_status == "Snatched":
|
||||
values = { "Status": "Snatched"}
|
||||
newValue['Status'] = "Snatched"
|
||||
elif issuechk['Status'] == "Downloaded":
|
||||
elif og_status == "Downloaded":
|
||||
values = { "Status": "Downloaded"}
|
||||
newValue['Status'] = "Downloaded"
|
||||
#if the status is Downloaded and it's on the pullist - let's mark it so everyone can bask in the glory
|
||||
|
||||
elif issuechk['Status'] == "Wanted":
|
||||
elif og_status == "Wanted":
|
||||
values = { "Status": "Wanted"}
|
||||
newValue['Status'] = "Wanted"
|
||||
elif issuechk['Status'] == "Archived":
|
||||
elif og_status == "Archived":
|
||||
values = { "Status": "Archived"}
|
||||
newValue['Status'] = "Archived"
|
||||
elif og_status == 'Failed':
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
if mylar.FAILED_AUTO:
|
||||
values = { "Status": "Wanted" }
|
||||
else:
|
||||
values = { "Status": "Failed" }
|
||||
else:
|
||||
values = { "Status": "Skipped" }
|
||||
else:
|
||||
values = { "Status": "Skipped"}
|
||||
newValue['Status'] = "Skipped"
|
||||
|
@ -338,18 +356,18 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
|
||||
if mylar.AUTOWANT_UPCOMING:
|
||||
#for issues not in db - to be added to Upcoming table.
|
||||
if issuechk is None:
|
||||
if og_status is None:
|
||||
newValue['Status'] = "Wanted"
|
||||
logger.fdebug('...Changing Status to Wanted and throwing it in the Upcoming section since it is not published yet.')
|
||||
#this works for issues existing in DB...
|
||||
elif issuechk['Status'] == "Skipped":
|
||||
elif og_status == "Skipped":
|
||||
newValue['Status'] = "Wanted"
|
||||
values = {"Status": "Wanted"}
|
||||
logger.fdebug('...New status of Wanted')
|
||||
elif issuechk['Status'] == "Wanted":
|
||||
elif og_status == "Wanted":
|
||||
logger.fdebug('...Status already Wanted .. not changing.')
|
||||
else:
|
||||
logger.fdebug('...Already have issue - keeping existing status of : ' + str(issuechk['Status']))
|
||||
logger.fdebug('...Already have issue - keeping existing status of : ' + og_status)
|
||||
|
||||
if issuechk is None:
|
||||
myDB.upsert("upcoming", newValue, controlValue)
|
||||
|
@ -376,9 +394,9 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
else:
|
||||
myDB.upsert("issues", values, control)
|
||||
|
||||
if issuechk['Status'] == 'Downloaded' or issuechk['Status'] == 'Archived' or issuechk['Status'] == 'Snatched':
|
||||
if any( [og_status == 'Downloaded', og_status == 'Archived', og_status == 'Snatched'] ):
|
||||
logger.fdebug('updating Pull-list to reflect status.')
|
||||
downstats = {"Status": issuechk['Status'],
|
||||
downstats = {"Status": og_status,
|
||||
"ComicID": issuechk['ComicID']}
|
||||
return downstats
|
||||
|
||||
|
@ -425,7 +443,7 @@ def weekly_update(ComicName,IssueNumber,CStatus,CID,futurepull=None,altissuenumb
|
|||
|
||||
def newpullcheck(ComicName, ComicID, issue=None):
|
||||
# When adding a new comic, let's check for new issues on this week's pullist and update.
|
||||
mylar.weeklypull.pullitcheck(ComicName, ComicID, issue)
|
||||
mylar.weeklypull.pullitcheck(comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
|
||||
return
|
||||
|
||||
def no_searchresults(ComicID):
|
||||
|
@ -805,25 +823,31 @@ def forceRescan(ComicID,archive=None,module=None):
|
|||
logger.fdebug('[DUPECHECK-CBZ PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining newly scanned in filename : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
|
||||
if mylar.DUPECONSTRAINT == 'filesize':
|
||||
if tmpfc['ComicSize'] <= di['filesize']:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
issuedupe = "yes"
|
||||
break
|
||||
else:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining newly scanned in filename : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
|
||||
if removedupe:
|
||||
#need to remove the entry from issuedupechk so can add new one.
|
||||
#tuple(y for y in x if y) for x in a
|
||||
issuedupe_temp = []
|
||||
for x in issuedupechk:
|
||||
if x['filename'] != di['filename']:
|
||||
issuedupe_temp.append(x)
|
||||
issuedupechk = issuedupe_temp
|
||||
foundchk = False
|
||||
if mylar.DUPECONSTRAINT == 'filesize':
|
||||
if tmpfc['ComicSize'] <= di['filesize']:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
issuedupe = "yes"
|
||||
break
|
||||
else:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining newly scanned in filename : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
|
||||
if removedupe:
|
||||
#need to remove the entry from issuedupechk so can add new one.
|
||||
#tuple(y for y in x if y) for x in a
|
||||
issuedupe_temp = []
|
||||
tmphavefiles = 0
|
||||
for x in issuedupechk:
|
||||
logger.fdebug('Comparing x: ' + x['filename'] + ' to di:' + di['filename'])
|
||||
if x['filename'] != di['filename']:
|
||||
logger.fdebug('Matched.')
|
||||
issuedupe_temp.append(x)
|
||||
tmphavefiles+=1
|
||||
issuedupechk = issuedupe_temp
|
||||
havefiles = tmphavefiles
|
||||
logger.fdebug(issuedupechk)
|
||||
foundchk = False
|
||||
break
|
||||
|
||||
if issuedupe == "no":
|
||||
|
||||
|
@ -1089,7 +1113,7 @@ def forceRescan(ComicID,archive=None,module=None):
|
|||
else:
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
if os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation']))):
|
||||
logger.info('Issues found within multiple destination directory location')
|
||||
#logger.fdebug('Issue(s) currently exist and found within multiple destination directory location')
|
||||
continue
|
||||
#print "Changing status from Downloaded to Archived - cannot locate file"
|
||||
controlValue = {"IssueID": down['IssueID']}
|
||||
|
|
|
@ -326,7 +326,6 @@ class WebInterface(object):
|
|||
logger.info('Attempting to add directly by ComicVineID: ' + str(comicid))
|
||||
if comicid.startswith('4050-'): comicid = re.sub('4050-','', comicid)
|
||||
threading.Thread(target=importer.addComictoDB, args=[comicid,mismatch,None,imported,ogcname]).start()
|
||||
print calledby
|
||||
if calledby == True or calledby == 'True':
|
||||
return
|
||||
elif calledby == 'web-import':
|
||||
|
|
|
@ -446,7 +446,7 @@ def pullit(forcecheck=None):
|
|||
os.remove( str(pullpath) + "newreleases.txt" )
|
||||
pullitcheck(forcecheck=forcecheck)
|
||||
|
||||
def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepull=None, issue=None):
|
||||
def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurepull=None, issue=None):
|
||||
if futurepull is None:
|
||||
logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
|
||||
else:
|
||||
|
@ -491,7 +491,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
|
|||
cur = con.cursor()
|
||||
# if it's a one-off check (during an add series), load the comicname here and ignore below.
|
||||
if comic1off_name:
|
||||
logger.fdebug("this is a one-off" + comic1off_name)
|
||||
logger.fdebug("This is a one-off for " + comic1off_name + '[ latest issue: ' + str(issue) + ' ]')
|
||||
lines.append(comic1off_name.strip())
|
||||
unlines.append(comic1off_name.strip())
|
||||
comicid.append(comic1off_id)
|
||||
|
|
Loading…
Reference in New Issue