From c3e305118f4c8c12e64e504cfd6195338736878b Mon Sep 17 00:00:00 2001
From: evilhero
Date: Fri, 3 May 2013 21:57:19 -0400
Subject: [PATCH] FIX:(#368) Post-Processing and Comicvine URL Fix settings
 would not save properly, IMP: more verbose logging for (#281) and removed
 some logging calls, FIX: icon errors on the issue details page

---
 data/interfaces/default/artistredone.html |  6 +-
 data/interfaces/default/weeklypull.html   |  1 +
 mylar/__init__.py                         | 11 ++-
 mylar/updater.py                          | 12 ++-
 mylar/webserve.py                         | 95 ++++++++++++++++-------
 mylar/weeklypull.py                       | 23 ++++--
 6 files changed, 101 insertions(+), 47 deletions(-)

diff --git a/data/interfaces/default/artistredone.html b/data/interfaces/default/artistredone.html
index 2fa9b5e9..5e0ac09d 100755
--- a/data/interfaces/default/artistredone.html
+++ b/data/interfaces/default/artistredone.html
@@ -332,15 +332,15 @@
 					linky = issue['Location']
 				except IOError as e:
 					linky = None
-			%>
 			%if linky:
+			%else:
+			%endif
 			%else:
-
+
 			%endif
-			%elif (issue['Status'] == 'Read'):
 			%else:
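For reference, the guard this template hunk turns on is simply whether the issue row points at a usable file; the icon markup itself was stripped from this copy of the patch. A rough standalone sketch of the same check in plain Python (names, and the exact exception handling, are illustrative only, not the Mako template's literal code):

    import os

    def issue_download_target(issue, comic_location):
        # issue is a dict-like sqlite row from the issues table; Location
        # holds the filename. Mirrors the template's try/except: any problem
        # resolving the file means no download icon gets rendered.
        try:
            linky = issue['Location']
            if linky is None or not os.path.isfile(os.path.join(comic_location, linky)):
                linky = None
        except (KeyError, IOError):
            linky = None
        return linky   # template shows the download icon only when truthy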
diff --git a/data/interfaces/default/weeklypull.html b/data/interfaces/default/weeklypull.html
index dcf257b5..29a4d07d 100755
--- a/data/interfaces/default/weeklypull.html
+++ b/data/interfaces/default/weeklypull.html
@@ -9,6 +9,7 @@
 		Refresh Pull-list
 		Recreate Pull-list
+		Downloaded to Dir
 		« Back to overview
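The new "Downloaded to Dir" menu entry only needs to point at an exposed CherryPy handler; the handler itself, MassWeeklyDownload, is added to webserve.py later in this patch. A minimal sketch of that routing convention with a stub body (the href value is an assumption, since the anchor markup is stripped from this copy):

    import cherrypy

    class WebInterface(object):
        def MassWeeklyDownload(self):
            # stub; the real handler (added below in webserve.py) copies this
            # week's downloaded issues into GRABBAG_DIR
            return "copying..."
        MassWeeklyDownload.exposed = True   # served at /MassWeeklyDownload

    if __name__ == '__main__':
        cherrypy.quickstart(WebInterface())   # http://localhost:8080/MassWeeklyDownload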
diff --git a/mylar/__init__.py b/mylar/__init__.py
index cb993b8b..81c7e9fb 100755
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -142,7 +142,7 @@
 SKIPPED2WANTED = False
 CVINFO = False
 LOG_LEVEL = None
-POST_PROCESSING = True
+POST_PROCESSING = 1
 
 USE_SABNZBD = True
 SAB_HOST = None
@@ -268,7 +268,7 @@ def initialize():
         NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS,\
         RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
         PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, \
-        PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, CVURL, \
+        PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, CVURL, CVAPIFIX, \
         COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER
 
     if __INITIALIZED__:
@@ -640,7 +640,6 @@ def launch_browser(host, port, root):
 def config_write():
     new_config = ConfigObj()
     new_config.filename = CONFIG_FILE
-
     new_config['General'] = {}
     new_config['General']['config_version'] = CONFIG_VERSION
     new_config['General']['http_port'] = HTTP_PORT
@@ -709,7 +708,7 @@ def config_write():
     new_config['General']['extra_scripts'] = EXTRA_SCRIPTS
     new_config['General']['enable_pre_scripts'] = int(ENABLE_PRE_SCRIPTS)
     new_config['General']['pre_scripts'] = PRE_SCRIPTS
-    new_config['General']['post_processing'] = POST_PROCESSING
+    new_config['General']['post_processing'] = int(POST_PROCESSING)
 
     new_config['SABnzbd'] = {}
@@ -952,6 +951,10 @@ def dbcheck():
     except sqlite3.OperationalError:
         c.execute('ALTER TABLE comics ADD COLUMN DetailURL TEXT')
 
+    try:
+        c.execute('SELECT ComicID from weekly')
+    except sqlite3.OperationalError:
+        c.execute('ALTER TABLE weekly ADD COLUMN ComicID TEXT')
 
     #if it's prior to Wednesday, the issue counts will be inflated by one as the online db's everywhere
     #prepare for the next 'new' release of a series. It's caught in updater.py, so let's just store the
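The POST_PROCESSING hunks are the settings-save fix from the subject line: ConfigObj writes every value back to config.ini as a string, and bool("False") is True in Python, so a boolean flag round-tripped through the file can come back permanently truthy. Storing the flag as an int, and coercing with int() on write, sidesteps that. A small standalone illustration, not Mylar's actual startup code:

    from configobj import ConfigObj

    cfg = ConfigObj('config.ini')
    cfg['General'] = {}
    cfg['General']['post_processing'] = int(True)   # written out as "1"
    cfg.write()

    cfg2 = ConfigObj('config.ini')
    raw = cfg2['General']['post_processing']        # read back as the string "1"
    post_processing = int(raw)                      # 1 -> truthy, 0 -> falsy
    # by contrast, bool("False") == True, which is how a bool setting gets stuck on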
diff --git a/mylar/updater.py b/mylar/updater.py
index fe719b3b..22af4f3b 100755
--- a/mylar/updater.py
+++ b/mylar/updater.py
@@ -182,15 +182,20 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
         if issuechk is None:
             myDB.upsert("upcoming", newValue, controlValue)
         else:
+            logger.fdebug("--attempt to find errant adds to Wanted list")
+            logger.fdebug("UpcomingNewValue: " + str(newValue))
+            logger.fdebug("UpcomingcontrolValue: " + str(controlValue))
             myDB.upsert("issues", values, control)
             if issuechk['Status'] == 'Downloaded':
                 logger.fdebug("updating Pull-list to reflect status.")
-                return issuechk['Status']
+                downstats = {"Status": issuechk['Status'],
+                             "ComicID": issuechk['ComicID']}
+                return downstats
     else:
         logger.fdebug("Issues don't match for some reason...weekly new issue: " + str(IssueNumber))
 
 
-def weekly_update(ComicName,IssueNumber,CStatus):
+def weekly_update(ComicName,IssueNumber,CStatus,CID):
     # here we update status of weekly table...
     # added Issue to stop false hits on series' that have multiple releases in a week
     # added CStatus to update status flags on Pullist screen
@@ -200,7 +205,8 @@ def weekly_update(ComicName,IssueNumber,CStatus):
     controlValue = { "COMIC": str(ComicName),
                      "ISSUE": str(IssueNumber)}
     if CStatus:
-        newValue = {"STATUS": CStatus}
+        newValue = {"STATUS": CStatus,
+                    "ComicID": CID}
     else:
         if mylar.AUTOWANT_UPCOMING:
             newValue = {"STATUS": "Wanted"}
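Both of these hunks lean on myDB.upsert(table, newValueDict, controlValueDict). The helper's body isn't shown in this patch; the usual shape of that pattern over sqlite is an UPDATE keyed on the control columns with an INSERT fallback, roughly as below (a hedged sketch, not Mylar's actual db module):

    import sqlite3

    def upsert(conn, table, new_value_dict, control_value_dict):
        # UPDATE the row matched by the control columns; INSERT if none matched.
        # Sketch only: assumes trusted table/column names, never user input.
        where = ' AND '.join(k + ' = ?' for k in control_value_dict)
        sets = ', '.join(k + ' = ?' for k in new_value_dict)
        args = list(new_value_dict.values()) + list(control_value_dict.values())
        cur = conn.execute('UPDATE ' + table + ' SET ' + sets + ' WHERE ' + where, args)
        if cur.rowcount < 1:
            cols = list(new_value_dict) + list(control_value_dict)
            marks = ', '.join('?' for _ in cols)
            conn.execute('INSERT INTO ' + table + ' (' + ', '.join(cols) + ') VALUES (' + marks + ')', args)
        conn.commit()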
diff --git a/mylar/webserve.py b/mylar/webserve.py
index b8a1fbce..c92ff1ab 100755
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -844,9 +844,9 @@ class WebInterface(object):
                   "Status": "added",
                   "ComicID": readlist['ComicID'],
                   "Issue_Number": readlist['Issue_Number'],
-                  "IssueDate": readlist['IssueDate'],
-                  "SeriesYear": comicinfo['ComicYear'],
-                  "ComicName": readlist['ComicName']}
+                  "IssueDate": readlist['IssueDate'],
+                  "SeriesYear": comicinfo['ComicYear'],
+                  "ComicName": comicinfo['ComicName']}
         myDB.upsert("readlist", newval, ctrlval)
         logger.info("Added " + str(readlist['ComicName']) + " # " + str(readlist['Issue_Number']) + " to the Reading list.")
@@ -1047,34 +1047,44 @@ class WebInterface(object):
         raise cherrypy.HTTPRedirect("history")
     clearhistory.exposed = True
 
-    def downloadLocal(self, IssueID=None, IssueArcID=None, ReadOrder=None):
-        print "tada"
-        print ("issueid: " + str(IssueID))
+    def downloadLocal(self, IssueID=None, IssueArcID=None, ReadOrder=None, dir=None):
         myDB = db.DBConnection()
-        if IssueID:
-            issueDL = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
-            comicid = issueDL['ComicID']
-            #print ("comicid: " + str(comicid))
-            comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
-            #---issue info
-            comicname = comic['ComicName']
-            issuenum = issueDL['Issue_Number']
-            issuedate = issueDL['IssueDate']
-            seriesyear = comic['ComicYear']
-            #---
-            issueLOC = comic['ComicLocation']
-            #print ("IssueLOC: " + str(issueLOC))
-            issueFILE = issueDL['Location']
-            #print ("IssueFILE: "+ str(issueFILE))
-            issuePATH = os.path.join(issueLOC,issueFILE)
-            #print ("IssuePATH: " + str(issuePATH))
+        issueDL = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
+        comicid = issueDL['ComicID']
+        #print ("comicid: " + str(comicid))
+        comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
+        #---issue info
+        comicname = comic['ComicName']
+        issuenum = issueDL['Issue_Number']
+        issuedate = issueDL['IssueDate']
+        seriesyear = comic['ComicYear']
+        #---
+        issueLOC = comic['ComicLocation']
+        #print ("IssueLOC: " + str(issueLOC))
+        issueFILE = issueDL['Location']
+        #print ("IssueFILE: "+ str(issueFILE))
+        issuePATH = os.path.join(issueLOC,issueFILE)
+        #print ("IssuePATH: " + str(issuePATH))
+
+        # if dir is None, it's a normal copy into the cache directory.
+        # if dir is a path, it's coming from the pull-list as the location to put all of the weekly comics.
+        if dir is not None:
+            dstPATH = dir
+        else:
             dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
-            #print ("dstPATH: " + str(dstPATH))
+        #print ("dstPATH: " + str(dstPATH))
+        if IssueID:
+            ISnewValueDict = {'inCacheDIR': 'True',
+                              'Location': issueFILE}
+
         if IssueArcID:
             if mylar.READ2FILENAME:
                 #if it's coming from a StoryArc, check to see if we're appending the ReadingOrder to the filename
                 ARCissueFILE = ReadOrder + "-" + issueFILE
                 dstPATH = os.path.join(mylar.CACHE_DIR, ARCissueFILE)
+            ISnewValueDict = {'inCacheDIR': 'True',
+                              'Location': issueFILE}
+
 #            issueDL = myDB.action("SELECT * FROM readinglist WHERE IssueArcID=?", [IssueArcID]).fetchone()
 #            storyarcid = issueDL['StoryArcID']
 #            #print ("comicid: " + str(comicid))
@@ -1086,6 +1096,7 @@ class WebInterface(object):
 #            #print ("IssuePATH: " + str(issuePATH))
 #            dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
 #            #print ("dstPATH: " + str(dstPATH))
+
         try:
             shutil.copy2(issuePATH, dstPATH)
         except IOError as e:
@@ -1094,10 +1105,15 @@ class WebInterface(object):
             logger.debug("successfully copied to cache...Enabling Download link")
 
         controlValueDict = {'IssueID': IssueID}
-        newValueDict = {'inCacheDIR': 'True',
-                        'Location': issueFILE}
-        myDB.upsert("readlist", newValueDict, controlValueDict)
-        myDB.upsert("issues", newValueDict, controlValueDict)
+        RLnewValueDict = {'inCacheDIR': 'True',
+                          'Location': issueFILE,
+                          'ComicID': comicid,
+                          'ComicName': comicname,
+                          'Issue_Number': issuenum,
+                          'SeriesYear': seriesyear,
+                          'IssueDate': issuedate}
+        myDB.upsert("readlist", RLnewValueDict, controlValueDict)
+        myDB.upsert("issues", ISnewValueDict, controlValueDict)
         if IssueArcID:
             controlValueD = {'IssueArcID': IssueArcID}
             newValueDict = {'inCacheDIR': 'True',
@@ -1106,6 +1122,28 @@ class WebInterface(object):
             #print("DB updated - Download link now enabled.")
 
     downloadLocal.exposed = True
+
+    def MassWeeklyDownload(self):
+        # this will grab all of the downloaded comics from the weekly pull-list and
+        # throw them into a 'weekly' pull folder for those wanting to transfer
+        # directly to a 3rd-party device.
+        myDB = db.DBConnection()
+        clist = myDB.select("SELECT * FROM Weekly WHERE Status='Downloaded'")
+        if not clist:   # nothing on the list, just go go gone
+            logger.info("There aren't any issues downloaded from this week yet.")
+        else:
+            for cl in clist:
+                comicid = cl['ComicID']   #downloaded & validated ComicID
+                isslist = myDB.select("SELECT * FROM Issues WHERE ComicID=? AND Status='Downloaded'", [comicid])
+                if not isslist: pass   # no issues found for the comicid - boo/boo
+                else:
+                    for iss in isslist:
+                        #go through the downloaded issues until we find the one we want.
+                        if iss['Issue_Number'] == cl['ISSUE']:
+                            self.downloadLocal(iss['IssueID'], dir=mylar.GRABBAG_DIR)
+                            logger.info("Copied " + iss['ComicName'] + " #" + str(iss['Issue_Number']) + " to " + str(mylar.GRABBAG_DIR))
+                            break
+
+    MassWeeklyDownload.exposed = True #for testing.
 
     def idirectory(self):
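The heart of the reworked downloadLocal is the new dir parameter: no dir means the usual copy into the cache for the download link, while a dir (MassWeeklyDownload passes GRABBAG_DIR) batches files into a single folder. Reduced to a standalone sketch with illustrative names:

    import os
    import shutil

    def copy_issue(issue_path, issue_file, cache_dir, dest_dir=None):
        # Mirrors the dir handling above: dest_dir=None copies into the cache,
        # a dest_dir (e.g. the grab-bag folder) collects the weekly downloads.
        dst = dest_dir if dest_dir is not None else os.path.join(cache_dir, issue_file)
        try:
            shutil.copy2(issue_path, dst)   # copy2 keeps file metadata (mtime etc.)
        except IOError:
            return None
        return dst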
@@ -1363,7 +1401,6 @@ class WebInterface(object):
         COUNT_HAVES = CHAVES[0][0]
         COUNT_ISSUES = CISSUES[0][0]
         COUNT_SIZE = helpers.human_size(CSIZE[0][0])
-
         comicinfo = { "COUNT_COMICS" : COUNT_COMICS,
                       "COUNT_HAVES" : COUNT_HAVES,
                       "COUNT_ISSUES" : COUNT_ISSUES,
diff --git a/mylar/weeklypull.py b/mylar/weeklypull.py
index dd58d125..fa173c39 100755
--- a/mylar/weeklypull.py
+++ b/mylar/weeklypull.py
@@ -371,6 +371,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
     cur = con.cursor()
     # if it's a one-off check (during an add series), load the comicname here and ignore below.
     if comic1off_name:
+        logger.fdebug("this is a one-off: " + str(comic1off_name))
         lines.append(comic1off_name.strip())
         unlines.append(comic1off_name.strip())
         comicid.append(comic1off_id)
@@ -412,12 +413,12 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
     while (cnt > -1):
         lines[cnt] = lines[cnt].upper()
         #llen[cnt] = str(llen[cnt])
-        logger.fdebug("looking for : " + str(lines[cnt]))
+        #logger.fdebug("looking for : " + str(lines[cnt]))
         sqlsearch = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', ' ', lines[cnt])
         sqlsearch = re.sub(r'\s', '%', sqlsearch)
         if 'THE' in sqlsearch: sqlsearch = re.sub('THE', '', sqlsearch)
         if '+' in sqlsearch: sqlsearch = re.sub('\+', '%PLUS%', sqlsearch)
-        logger.fdebug("searchsql: " + str(sqlsearch))
+        #logger.fdebug("searchsql: " + str(sqlsearch))
         weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
         #cur.execute('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [lines[cnt]])
         for week in weekly:
@@ -425,7 +426,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
                 break
             for nono in not_t:
                 if nono in week['PUBLISHER']:
-                    logger.fdebug("nono present")
+                    #logger.fdebug("nono present")
                     break
                 if nono in week['ISSUE']:
                     #logger.fdebug("graphic novel/tradeback detected..ignoring.")
                     break
@@ -457,11 +458,11 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
                 #thnx to A+X for this...
                 if '+' in watchcomic:
                     logger.fdebug("+ detected...adjusting.")
-                    logger.fdebug("comicnm:" + comicnm)
-                    logger.fdebug("watchcomic:" + watchcomic)
+                    #logger.fdebug("comicnm:" + comicnm)
+                    #logger.fdebug("watchcomic:" + watchcomic)
                     modwatchcomic = re.sub('\+', 'PLUS', modwatchcomic)
-                    logger.fdebug("modcomicnm:" + modcomicnm)
-                    logger.fdebug("modwatchcomic:" + modwatchcomic)
+                    #logger.fdebug("modcomicnm:" + modcomicnm)
+                    #logger.fdebug("modwatchcomic:" + modwatchcomic)
                 if comicnm == watchcomic.upper() or modcomicnm == modwatchcomic.upper():
                     logger.fdebug("matched on:" + str(comicnm) + "..." + str(watchcomic).upper())
                     pass
@@ -499,7 +500,13 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
                         # here we add to upcoming table...
                         statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
                         # here we update status of weekly table...
-                        updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=statusupdate)
+                        if statusupdate is not None:
+                            cstatus = statusupdate['Status']
+                            cstatusid = statusupdate['ComicID']
+                        else:
+                            cstatus = None
+                            cstatusid = None
+                        updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=cstatus, CID=cstatusid)
                     break
                 break
             break
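For clarity, here is the LIKE-pattern munging that pullitcheck performs before the weekly query, shown in isolation (same regexes as in the hunk above, wrapped in an illustrative helper):

    import re

    def pull_sqlsearch(name):
        # Normalization applied before the LIKE query: punctuation becomes
        # spaces, whitespace becomes SQL wildcards, 'THE' is dropped, and '+'
        # is protected as %PLUS% (thnx to A+X).
        s = name.upper()
        s = re.sub(r"[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]", ' ', s)
        s = re.sub(r'\s', '%', s)
        if 'THE' in s:
            s = re.sub('THE', '', s)
        if '+' in s:
            s = re.sub(r'\+', '%PLUS%', s)
        return s

    # e.g. pull_sqlsearch("A+X") -> 'A%PLUS%X'
    #      pull_sqlsearch("Batman, Inc.") -> 'BATMAN%%INC%'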