mirror of https://github.com/evilhero/mylar
FIX: (#668) URL for the pull-list changed; IMP: better matching when determining whether a rebooted series is a valid watchlist match; FIX: marking issues/annuals via checkboxes works again; FIX: annuals could not be marked as Archived; FIX: the total Have count was off if any issues were marked Archived.
This commit is contained in:
parent
8b67d14bac
commit
4c2ec769d1
|
@ -902,7 +902,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
|
|||
logger.fdebug('latestissue status: ' + chkstats['Status'])
|
||||
if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted' or chkstats['Status'] == 'Snatched':
|
||||
logger.info('Checking this week pullist for new issues of ' + comic['ComicName'])
|
||||
updater.newpullcheck(comic['ComicName'], comicid)
|
||||
updater.newpullcheck(comic['ComicName'], comicid, issue=latestiss)
|
||||
|
||||
#here we grab issues that have been marked as wanted above...
|
||||
|
||||
|
|
|
@ -194,6 +194,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
return
|
||||
else:
|
||||
# if futurepull is not None, let's just update the status and ComicID
|
||||
# NOTE: THIS IS CREATING EMPTY ENTRIES IN THE FUTURE TABLE. ???
|
||||
nKey = {"ComicID": ComicID}
|
||||
nVal = {"Status": "Wanted"}
|
||||
myDB.upsert("future", nVal, nKey)
|
||||
|
@ -314,9 +315,9 @@ def weekly_update(ComicName,IssueNumber,CStatus,CID,futurepull=None,altissuenumb
|
|||
logger.info('updating control: ' + str(controlValue))
|
||||
myDB.upsert("future", newValue, controlValue)
|
||||
|
||||
def newpullcheck(ComicName, ComicID):
|
||||
def newpullcheck(ComicName, ComicID, issue=None):
|
||||
# When adding a new comic, let's check for new issues on this week's pullist and update.
|
||||
mylar.weeklypull.pullitcheck(ComicName, ComicID)
|
||||
mylar.weeklypull.pullitcheck(ComicName, ComicID, issue)
|
||||
return
|
||||
|
||||
def no_searchresults(ComicID):
|
||||
|
@ -806,11 +807,18 @@ def forceRescan(ComicID,archive=None):
|
|||
logger.info('Total files located: ' + str(havefiles))
|
||||
foundcount = havefiles
|
||||
arcfiles = 0
|
||||
arcanns = 0
|
||||
# if filechecker returns 0 files (it doesn't find any), but some issues have a status of 'Archived'
|
||||
# the loop below won't work...let's adjust :)
|
||||
arcissues = myDB.action("SELECT count(*) FROM issues WHERE ComicID=? and Status='Archived'", [ComicID]).fetchall()
|
||||
if int(arcissues[0][0]) > 0:
|
||||
arcfiles = arcissues[0][0]
|
||||
arcannuals = myDB.action("SELECT count(*) FROM annuals WHERE ComicID=? and Status='Archived'", [ComicID]).fetchall()
|
||||
if int(arcissues[0][0]) > 0:
|
||||
arcanns = arcannuals[0][0]
|
||||
|
||||
if arcfiles > 0 and arcanns > 0:
|
||||
arcfiles = arcfiles + arcanns
|
||||
havefiles = havefiles + arcfiles
|
||||
logger.fdebug('Adjusting have total to ' + str(havefiles) + ' because of this many archive files:' + str(arcfiles))
|
||||
|
||||
|
@ -826,6 +834,7 @@ def forceRescan(ComicID,archive=None):
|
|||
#adjust for issues that have been marked as Downloaded, but aren't found/don't exist.
|
||||
#do it here, because above loop only cycles though found comics using filechecker.
|
||||
downissues = myDB.select("SELECT * FROM issues WHERE ComicID=? and Status='Downloaded'", [ComicID])
|
||||
downissues += myDB.select("SELECT * FROM annuals WHERE ComicID=? and Status='Downloaded'", [ComicID])
|
||||
if downissues is None:
|
||||
pass
|
||||
else:
|
||||
|
|
|
@ -684,14 +684,17 @@ class WebInterface(object):
|
|||
if mi is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
mi = myDB.action("SELECT * FROM annuals WHERE IssueID=?",[IssueID]).fetchone()
|
||||
comicname = mi['ReleaseComicName']
|
||||
annchk = 'yes'
|
||||
else:
|
||||
comicname = mi['ComicName']
|
||||
miyr = myDB.action("SELECT ComicYear FROM comics WHERE ComicID=?", [mi['ComicID']]).fetchone()
|
||||
if action == 'Downloaded':
|
||||
if mi['Status'] == "Skipped" or mi['Status'] == "Wanted":
|
||||
logger.info(u"Cannot change status to %s as comic is not Snatched or Downloaded" % (newaction))
|
||||
# continue
|
||||
elif action == 'Archived':
|
||||
logger.info(u"Marking %s %s as %s" % (mi['ComicName'], mi['Issue_Number'], newaction))
|
||||
logger.info(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
|
||||
#updater.forceRescan(mi['ComicID'])
|
||||
issuestoArchive.append(IssueID)
|
||||
elif action == 'Wanted' or action == 'Retry':
|
||||
|
@ -726,8 +729,8 @@ class WebInterface(object):
|
|||
addArtists.exposed = True
|
||||
|
||||
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None):
|
||||
logger.fdebug('ComicID:' + str(ComicID))
|
||||
logger.fdebug('mode:' + str(mode))
|
||||
#logger.fdebug('ComicID:' + str(ComicID))
|
||||
#logger.fdebug('mode:' + str(mode))
|
||||
now = datetime.datetime.now()
|
||||
myDB = db.DBConnection()
|
||||
#mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
|
||||
|
@ -781,7 +784,6 @@ class WebInterface(object):
|
|||
if mode == 'want':
|
||||
logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...")
|
||||
myDB.upsert("issues", newStatus, controlValueDict)
|
||||
logger.info('Written to db.')
|
||||
else:
|
||||
logger.info(u"Marking " + ComicName + " Annual: " + ComicIssue + " as wanted...")
|
||||
myDB.upsert("annuals", newStatus, controlValueDict)
|
||||
|
@ -798,7 +800,7 @@ class WebInterface(object):
|
|||
issues = myDB.action("SELECT IssueDate, ReleaseDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
|
||||
if ComicYear == None:
|
||||
ComicYear = str(issues['IssueDate'])[:4]
|
||||
if issues['ReleaseDate'] is None:
|
||||
if issues['ReleaseDate'] is None or issues['ReleaseDate'] == '0000-00-00':
|
||||
logger.info('No Store Date found for given issue. This is probably due to not Refreshing the Series beforehand.')
|
||||
logger.info('I Will assume IssueDate as Store Date, but you should probably Refresh the Series and try again if required.')
|
||||
storedate = issues['IssueDate']
|
||||
|
@ -829,9 +831,16 @@ class WebInterface(object):
|
|||
annchk = 'no'
|
||||
if issue is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
|
||||
issann = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
|
||||
comicname = issann['ReleaseComicName']
|
||||
issue = issann['Issue_Number']
|
||||
annchk = 'yes'
|
||||
logger.info(u"Marking " + issue['ComicName'] + " issue # " + issue['Issue_Number'] + " as skipped...")
|
||||
comicid = issann['ComicID']
|
||||
else:
|
||||
comicname = issue['ComicName']
|
||||
issue = issue['Issue_Number']
|
||||
comicid = issue['ComicID']
|
||||
logger.info(u"Marking " + comicname + " issue # " + str(issue) + " as Skipped...")
|
||||
controlValueDict = {"IssueID": IssueID}
|
||||
newValueDict = {"Status": "Skipped"}
|
||||
if annchk == 'yes':
|
||||
|
@ -871,16 +880,23 @@ class WebInterface(object):
|
|||
annchk = 'no'
|
||||
if issue is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
|
||||
issann = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
|
||||
comicname = issann['ReleaseComicName']
|
||||
issue = issann['Issue_Number']
|
||||
annchk = 'yes'
|
||||
logger.info(u"Marking " + issue['ComicName'] + " issue # " + issue['Issue_Number'] + " as archived...")
|
||||
comicid = issann['ComicID']
|
||||
else:
|
||||
comicname = issue['ComicName']
|
||||
issue = issue['Issue_Number']
|
||||
comicid = issue['ComicID']
|
||||
logger.info(u"Marking " + comicname + " issue # " + str(issue) + " as archived...")
|
||||
controlValueDict = {'IssueID': IssueID}
|
||||
newValueDict = {'Status': 'Archived'}
|
||||
if annchk == 'yes':
|
||||
myDB.upsert("annuals", newValueDict, controlValueDict)
|
||||
else:
|
||||
myDB.upsert("issues", newValueDict, controlValueDict)
|
||||
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % issue['ComicID'])
|
||||
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
|
||||
archiveissue.exposed = True
|
||||
|
||||
|
||||
|
@ -1174,7 +1190,7 @@ class WebInterface(object):
|
|||
|
||||
wantedcount = iss_cnt + ann_cnt
|
||||
|
||||
#let's straightload the series that have no issue data associated as of yet (ie. new series) form the futurepulllist
|
||||
#let's straightload the series that have no issue data associated as of yet (ie. new series) from the futurepulllist
|
||||
future_nodata_upcoming = myDB.select('SELECT * FROM futureupcoming')
|
||||
|
||||
#let's move any items from the upcoming table into the wanted table if the date has already passed.
|
||||
|
@ -1197,6 +1213,7 @@ class WebInterface(object):
|
|||
myDB.upsert("issues", mvvalues, mvcontroldict)
|
||||
|
||||
#remove old entry from upcoming so it won't try to continually download again.
|
||||
logger.fdebug('[DELETE] - ' + mvup['ComicName'] + ' issue #: ' + str(mvup['IssueNumber']))
|
||||
deleteit = myDB.action("DELETE from upcoming WHERE ComicName=? AND IssueNumber=?", [mvup['ComicName'],mvup['IssueNumber']])
|
||||
|
||||
|
||||
|
@ -2311,9 +2328,9 @@ class WebInterface(object):
|
|||
"nzb_startup_search" : helpers.checked(mylar.NZB_STARTUP_SEARCH),
|
||||
"libraryscan_interval" : mylar.LIBRARYSCAN_INTERVAL,
|
||||
"search_delay" : mylar.SEARCH_DELAY,
|
||||
"nzb_downloader_sabnzbd" : helpers.radio(int(mylar.NZB_DOWNLOADER), 0),
|
||||
"nzb_downloader_nzbget" : helpers.radio(int(mylar.NZB_DOWNLOADER), 1),
|
||||
"nzb_downloader_blackhole" : helpers.radio(int(mylar.NZB_DOWNLOADER), 2),
|
||||
"nzb_downloader_sabnzbd" : helpers.radio(mylar.NZB_DOWNLOADER, 0),
|
||||
"nzb_downloader_nzbget" : helpers.radio(mylar.NZB_DOWNLOADER, 1),
|
||||
"nzb_downloader_blackhole" : helpers.radio(mylar.NZB_DOWNLOADER, 2),
|
||||
"sab_host" : mylar.SAB_HOST,
|
||||
"sab_user" : mylar.SAB_USERNAME,
|
||||
"sab_api" : mylar.SAB_APIKEY,
|
||||
|
|
|
@ -53,7 +53,7 @@ def pullit(forcecheck=None):
|
|||
logger.info(u"No pullist found...I'm going to try and get a new list now.")
|
||||
pulldate = '00000000'
|
||||
if pulldate is None: pulldate = '00000000'
|
||||
PULLURL = 'http://www.previewsworld.com/shipping/newreleases.txt'
|
||||
PULLURL = 'http://www.previewsworld.com/shipping/prevues/newreleases.txt'
|
||||
#PULLURL = 'http://www.previewsworld.com/Archive/GetFile/1/1/71/994/081512.txt'
|
||||
|
||||
#Prepare the Substitute name switch for pulllist to comic vine conversion
|
||||
|
@ -383,7 +383,7 @@ def pullit(forcecheck=None):
|
|||
os.remove( str(pullpath) + "newreleases.txt" )
|
||||
pullitcheck(forcecheck=forcecheck)
|
||||
|
||||
def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepull=None):
|
||||
def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepull=None, issue=None):
|
||||
if futurepull is None:
|
||||
logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
|
||||
else:
|
||||
|
@ -432,6 +432,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
|
|||
lines.append(comic1off_name.strip())
|
||||
unlines.append(comic1off_name.strip())
|
||||
comicid.append(comic1off_id)
|
||||
latestissue.append(issue)
|
||||
w = 1
|
||||
else:
|
||||
#let's read in the comic.watchlist from the db here
|
||||
|
@ -667,14 +668,14 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
|
|||
if validcheck == False:
|
||||
if date_downloaded is None:
|
||||
break
|
||||
|
||||
latest_int = helpers.issuedigits(latestiss)
|
||||
weekiss_int = helpers.issuedigits(week['ISSUE'])
|
||||
logger.fdebug('comparing ' + str(latest_int) + ' to ' + str(weekiss_int))
|
||||
if (latest_int > weekiss_int) or (latest_int == 0 or weekiss_int == 0):
|
||||
logger.fdebug(str(week['ISSUE']) + ' should not be the next issue in THIS volume of the series.')
|
||||
logger.fdebug('it should be either greater than ' + str(latestiss) + ' or an issue #0')
|
||||
break
|
||||
if chktype == 'series':
|
||||
latest_int = helpers.issuedigits(latestiss)
|
||||
weekiss_int = helpers.issuedigits(week['ISSUE'])
|
||||
logger.fdebug('comparing ' + str(latest_int) + ' to ' + str(weekiss_int))
|
||||
if (latest_int > weekiss_int) or (latest_int == 0 or weekiss_int == 0):
|
||||
logger.fdebug(str(week['ISSUE']) + ' should not be the next issue in THIS volume of the series.')
|
||||
logger.fdebug('it should be either greater than ' + str(latestiss) + ' or an issue #0')
|
||||
break
|
||||
|
||||
else:
|
||||
#logger.fdebug('issuedate:' + str(datevalues[0]['issuedate']))
|
||||
|
|
Loading…
Reference in New Issue