diff --git a/data/interfaces/default/artistredone.html b/data/interfaces/default/artistredone.html
index aadff5e9..2fa9b5e9 100755
--- a/data/interfaces/default/artistredone.html
+++ b/data/interfaces/default/artistredone.html
@@ -65,15 +65,43 @@
 %if comic['ComicPublisher'] == 'DC Comics':
- DC
+ DC
 %elif comic['ComicPublisher'] == 'Marvel':
- Marvel
+ Marvel
 %elif comic['ComicPublisher'] == 'Image':
- Image
-%elif comic['ComicPublisher'] == 'Dark Horse Comics':
- Darkhorse
+ Image
+%elif comic['ComicPublisher'] == 'Dark Horse Comics' or comic['ComicPublisher'] == 'Dark Horse':
+ Darkhorse
 %elif comic['ComicPublisher'] == 'IDW Publishing':
- IDW
+ IDW
+%elif comic['ComicPublisher'] == 'Icon':
+ Icon
+%elif comic['ComicPublisher'] == 'Red5':
+ Red5
+%elif comic['ComicPublisher'] == 'Vertigo':
+ Vertigo
+%elif comic['ComicPublisher'] == 'ShadowLine':
+ Shadowline
+%elif comic['ComicPublisher'] == 'Archie Comics':
+ Archie
+%elif comic['ComicPublisher'] == 'Oni Press':
+ Oni Press
+%elif comic['ComicPublisher'] == 'Tokyopop':
+ Tokyopop
+%elif comic['ComicPublisher'] == 'Midtown Comics':
+ Midtown
+%elif comic['ComicPublisher'] == 'Boom! Studios':
+ Boom!
+%elif comic['ComicPublisher'] == 'Skybound':
+ Skybound
+%elif comic['ComicPublisher'] == 'Top Cow':
+ Top Cow
+%elif comic['ComicPublisher'] == 'Dynamite Entertainment':
+ Dynamite
+%elif comic['ComicPublisher'] == 'Cartoon Books':
+ Cartoon Books
 %endif
@@ -120,15 +148,15 @@
 %if comic['ComicPublisher'] == 'DC Comics':
- DC
+ DC
 %elif comic['ComicPublisher'] == 'Marvel':
- Marvel
+ Marvel
 %elif comic['ComicPublisher'] == 'Image':
- Image
+ Image
 %elif comic['ComicPublisher'] == 'Dark Horse Comics':
- Darkhorse
+ Darkhorse
 %elif comic['ComicPublisher'] == 'IDW Publishing':
- IDW
+ IDW
 %endif
@@ -180,15 +208,15 @@
 %if comic['ComicPublisher'] == 'DC Comics':
- DC
+ DC
 %elif comic['ComicPublisher'] == 'Marvel':
- Marvel
+ Marvel
 %elif comic['ComicPublisher'] == 'Image':
- Image
+ Image
 %elif comic['ComicPublisher'] == 'Dark Horse Comics':
- Darkhorse
+ Darkhorse
 %elif comic['ComicPublisher'] == 'IDW Publishing':
- IDW
+ IDW
 %endif
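The publisher-to-logo chain above is repeated in all three hunks of artistredone.html, so every new publisher has to be wired up in three places. A data-driven lookup is one way the template could stay in sync; the sketch below is illustrative only — the helper name and the publisher-to-file pairing are assumptions, not part of this patch — and it only references image files the patch adds under images/publisherlogos/.

    # Hypothetical helper (not in this commit): map ComicPublisher values to the
    # logo files shipped in data/interfaces/default/images/publisherlogos/.
    PUBLISHER_LOGOS = {
        'DC Comics': 'logo-dccomics.png',
        'Marvel': 'logo-marvel.jpg',
        'Image': 'logo-imagecomics.png',
        'Dark Horse Comics': 'logo-darkhorse.png',
        'Dark Horse': 'logo-darkhorse.png',
        'IDW Publishing': 'logo-idwpublish.png',
        'Icon': 'logo-iconcomics.png',
        'Red5': 'logo-red5comics.png',
        'Vertigo': 'logo-vertigo.jpg',
        'ShadowLine': 'logo-shadowline.png',
        'Archie Comics': 'logo-archiecomics.jpg',
        'Oni Press': 'logo-onipress.png',
        'Tokyopop': 'logo-tokyopop.jpg',
        'Midtown Comics': 'logo-midtowncomics.jpg',
        'Boom! Studios': 'logo-boom.jpg',
        'Skybound': 'logo-skybound.jpg',
        'Top Cow': 'logo-topcow.gif',
        'Dynamite Entertainment': 'logo-dynamite.png',
        'Cartoon Books': 'logo-cartoonbooks.jpg',
    }

    def publisher_logo(publisher):
        # Returns None for publishers without a bundled logo so the template
        # can simply skip the <img> tag in that case.
        return PUBLISHER_LOGOS.get(publisher)
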
diff --git a/data/interfaces/default/css/style.css b/data/interfaces/default/css/style.css
index 96384237..3d456c2a 100755
--- a/data/interfaces/default/css/style.css
+++ b/data/interfaces/default/css/style.css
@@ -917,7 +917,7 @@ div#artistheader h2 a {
     text-align: center;
 }
 #read_detail th#issueyear {
-    min-width: 75px;
+    min-width: 40px;
     text-align: center;
 }
 #read_detail td#comicname {
@@ -933,7 +933,7 @@ div#artistheader h2 a {
     vertical-align: middle;
 }
 #read_detail td#issueyear{
-    min-width: 75px;
+    min-width: 40px;
     text-align: left;
     vertical-align: middle;
 }
diff --git a/data/interfaces/default/images/logo-imagecomics.gif b/data/interfaces/default/images/logo-imagecomics.gif
deleted file mode 100755
index f2ed0b12..00000000
Binary files a/data/interfaces/default/images/logo-imagecomics.gif and /dev/null differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-archiecomics.jpg b/data/interfaces/default/images/publisherlogos/logo-archiecomics.jpg
new file mode 100755
index 00000000..10460f3f
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-archiecomics.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-bluewatercomics.png b/data/interfaces/default/images/publisherlogos/logo-bluewatercomics.png
new file mode 100755
index 00000000..dbc69059
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-bluewatercomics.png differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-boom.jpg b/data/interfaces/default/images/publisherlogos/logo-boom.jpg
new file mode 100755
index 00000000..083ff77f
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-boom.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-cartoonbooks.jpg b/data/interfaces/default/images/publisherlogos/logo-cartoonbooks.jpg
new file mode 100755
index 00000000..13a2450a
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-cartoonbooks.jpg differ
diff --git a/data/interfaces/default/images/logo-darkhorse.jpg b/data/interfaces/default/images/publisherlogos/logo-darkhorse.jpg
similarity index 100%
rename from data/interfaces/default/images/logo-darkhorse.jpg
rename to data/interfaces/default/images/publisherlogos/logo-darkhorse.jpg
diff --git a/data/interfaces/default/images/publisherlogos/logo-darkhorse.png b/data/interfaces/default/images/publisherlogos/logo-darkhorse.png
new file mode 100755
index 00000000..343d5bd2
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-darkhorse.png differ
diff --git a/data/interfaces/default/images/logo-dccomics.png b/data/interfaces/default/images/publisherlogos/logo-dccomics.png
similarity index 100%
rename from data/interfaces/default/images/logo-dccomics.png
rename to data/interfaces/default/images/publisherlogos/logo-dccomics.png
diff --git a/data/interfaces/default/images/publisherlogos/logo-dynamite.png b/data/interfaces/default/images/publisherlogos/logo-dynamite.png
new file mode 100755
index 00000000..c8ae14d2
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-dynamite.png differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-iconcomics.png b/data/interfaces/default/images/publisherlogos/logo-iconcomics.png
new file mode 100755
index 00000000..2085a309
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-iconcomics.png differ
diff --git a/data/interfaces/default/images/logo-idwpublish.png b/data/interfaces/default/images/publisherlogos/logo-idwpublish.png
old mode 100644
new mode 100755
similarity index 100%
rename from data/interfaces/default/images/logo-idwpublish.png
rename to data/interfaces/default/images/publisherlogos/logo-idwpublish.png
diff --git a/data/interfaces/default/images/publisherlogos/logo-imagecomics.png b/data/interfaces/default/images/publisherlogos/logo-imagecomics.png
new file mode 100755
index 00000000..7510472b
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-imagecomics.png differ
diff --git a/data/interfaces/default/images/logo-marvel.jpg b/data/interfaces/default/images/publisherlogos/logo-marvel.jpg
similarity index 100%
rename from data/interfaces/default/images/logo-marvel.jpg
rename to data/interfaces/default/images/publisherlogos/logo-marvel.jpg
diff --git a/data/interfaces/default/images/logo-midtowncomics.jpg b/data/interfaces/default/images/publisherlogos/logo-midtowncomics.jpg
similarity index 100%
rename from data/interfaces/default/images/logo-midtowncomics.jpg
rename to data/interfaces/default/images/publisherlogos/logo-midtowncomics.jpg
diff --git a/data/interfaces/default/images/publisherlogos/logo-onipress.png b/data/interfaces/default/images/publisherlogos/logo-onipress.png
new file mode 100755
index 00000000..3e8fee80
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-onipress.png differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-red5comics.png b/data/interfaces/default/images/publisherlogos/logo-red5comics.png
new file mode 100755
index 00000000..8db74895
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-red5comics.png differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-shadowline.png b/data/interfaces/default/images/publisherlogos/logo-shadowline.png
new file mode 100755
index 00000000..55261f8d
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-shadowline.png differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-skybound.jpg b/data/interfaces/default/images/publisherlogos/logo-skybound.jpg
new file mode 100755
index 00000000..571984df
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-skybound.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-tokyopop.jpg b/data/interfaces/default/images/publisherlogos/logo-tokyopop.jpg
new file mode 100755
index 00000000..c7a857b7
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-tokyopop.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-topcow.gif b/data/interfaces/default/images/publisherlogos/logo-topcow.gif
new file mode 100755
index 00000000..85ebbe02
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-topcow.gif differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-vertigo.jpg b/data/interfaces/default/images/publisherlogos/logo-vertigo.jpg
new file mode 100755
index 00000000..4d9641e5
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-vertigo.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logo-zenescope.jpg b/data/interfaces/default/images/publisherlogos/logo-zenescope.jpg
new file mode 100755
index 00000000..02d496ea
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logo-zenescope.jpg differ
diff --git a/data/interfaces/default/images/publisherlogos/logos.png b/data/interfaces/default/images/publisherlogos/logos.png
new file mode 100755
index 00000000..65be5d0f
Binary files /dev/null and b/data/interfaces/default/images/publisherlogos/logos.png differ
diff --git a/mylar/__init__.py b/mylar/__init__.py
index 1118fdae..33d761a2 100755
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -194,10 +194,11 @@
 COUNT_HAVES = 0
 COMICSORT = None
 ANNUALS_ON = 0
-CV_ONLY = 0
-CV_ONETIMER = 0
+CV_ONLY = 1
+CV_ONETIMER = 1
 GRABBAG_DIR = None
 HIGHCOUNT = 0
+READ2FILENAME = 0
 
 def CheckSection(sec):
     """ Check if INI section exists, if not create it """
@@ -258,7 +259,7 @@
        NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS,\
        RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
        PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, \
-       PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, \
+       PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, \
        COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, ANNUALS_ON, CV_ONLY, CV_ONETIMER
 
     if __INITIALIZED__:
@@ -336,7 +337,7 @@
         GRABBAG_DIR = DESTINATION_DIR
     HIGHCOUNT = check_setting_str(CFG, 'General', 'highcount', '')
     if not HIGHCOUNT: HIGHCOUNT = 0
-
+    READ2FILENAME = bool(check_setting_int(CFG, 'General', 'read2filename', 0))
     PROWL_ENABLED = bool(check_setting_int(CFG, 'Prowl', 'prowl_enabled', 0))
     PROWL_KEYS = check_setting_str(CFG, 'Prowl', 'prowl_keys', '')
     PROWL_ONSNATCH = bool(check_setting_int(CFG, 'Prowl', 'prowl_onsnatch', 0))
@@ -355,15 +356,15 @@
     CVINFO = bool(check_setting_int(CFG, 'General', 'cvinfo', 0))
     ANNUALS_ON = bool(check_setting_int(CFG, 'General', 'annuals_on', 0))
     if not ANNUALS_ON:
-        #default to off
+        #default to on
         ANNUALS_ON = 0
-    CV_ONLY = bool(check_setting_int(CFG, 'General', 'cv_only', 0))
+    CV_ONLY = bool(check_setting_int(CFG, 'General', 'cv_only', 1))
     if not CV_ONLY:
-        #default to off
-        CV_ONLY = 0
-    CV_ONETIMER = bool(check_setting_int(CFG, 'General', 'cv_onetimer', 0))
+        #default to on
+        CV_ONLY = 1
+    CV_ONETIMER = bool(check_setting_int(CFG, 'General', 'cv_onetimer', 1))
     if not CV_ONETIMER:
-        CV_ONETIMER = 0
+        CV_ONETIMER = 1
     LOG_LEVEL = check_setting_str(CFG, 'General', 'log_level', '')
     ENABLE_EXTRA_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_extra_scripts', 0))
     EXTRA_SCRIPTS = check_setting_str(CFG, 'General', 'extra_scripts', '')
@@ -667,7 +668,7 @@
     new_config['General']['search_delay'] = SEARCH_DELAY
     new_config['General']['grabbag_dir'] = GRABBAG_DIR
     new_config['General']['highcount'] = HIGHCOUNT
-
+    new_config['General']['read2filename'] = int(READ2FILENAME)
     new_config['General']['use_minsize'] = int(USE_MINSIZE)
     new_config['General']['minsize'] = MINSIZE
     new_config['General']['use_maxsize'] = int(USE_MAXSIZE)
diff --git a/mylar/filechecker.py b/mylar/filechecker.py
index c0bd14da..d9dd46fa 100755
--- a/mylar/filechecker.py
+++ b/mylar/filechecker.py
@@ -61,15 +61,17 @@ def listFiles(dir,watchcomic,AlternateSearch=None):
                 subname = re.sub(subit, '', subname)
                 volrem = subit
 
-        subname = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]',' ', str(subname))
-        modwatchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', ' ', u_watchcomic)
+        subname = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\+\'\?\@]',' ', str(subname))
+        modwatchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\+\'\?\@]', ' ', u_watchcomic)
+        modwatchcomic = re.sub('\&', ' and ', modwatchcomic)
         modwatchcomic = re.sub('\s+', ' ', str(modwatchcomic)).strip()
-
+        subname = re.sub('&', ' and ', subname)
         subname = re.sub('\s+', ' ', str(subname)).strip()
         if AlternateSearch is not None:
             #same = encode.
             u_altsearchcomic = AlternateSearch.encode('ascii', 'ignore').strip()
-            altsearchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', ' ', u_altsearchcomic)
+            altsearchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\+\'\?\@]', ' ', u_altsearchcomic)
+            altsearchcomic = re.sub('&', ' and ', altsearchcomic)
             altsearchcomic = re.sub('\s+', ' ', str(altsearchcomic)).strip()
         else:
             #create random characters so it will never match.
diff --git a/mylar/importer.py b/mylar/importer.py
index 2fac2498..2e8f8ca7 100755
--- a/mylar/importer.py
+++ b/mylar/importer.py
@@ -608,7 +608,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
     if pullupd is None:
     # lets' check the pullist for anything at this time as well since we're here.
     # do this for only Present comics....
-        if mylar.AUTOWANT_UPCOMING: #and 'Present' in gcdinfo['resultPublished']:
+        if mylar.AUTOWANT_UPCOMING and lastpubdate == 'Present': #and 'Present' in gcdinfo['resultPublished']:
             logger.info(u"Checking this week's pullist for new issues of " + comic['ComicName'])
             updater.newpullcheck(comic['ComicName'], comicid)
 
diff --git a/mylar/librarysync.py b/mylar/librarysync.py
index fb004dd4..d0ab3882 100755
--- a/mylar/librarysync.py
+++ b/mylar/librarysync.py
@@ -224,6 +224,12 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None):
                 com_NAME = cname
                 print ("com_NAME : " + com_NAME)
                 yearmatch = "True"
+            else:
+                # we're assuming that the year is in brackets (and it should be damnit)
+                if m[cnt][:-2] == '19' or m[cnt][:-2] == '20':
+                    print ("year detected: " + str(m[cnt]))
+                    ydetected = 'yes'
+                    result_comyear = m[cnt]
             cnt+=1
         splitit = []
 
diff --git a/mylar/mb.py b/mylar/mb.py
index d66d7788..26b66689 100755
--- a/mylar/mb.py
+++ b/mylar/mb.py
@@ -87,7 +87,7 @@ def findComic(name, mode, issue, limityear=None):
         xmlcnt = result.getElementsByTagName('count_of_issues')[0].firstChild.wholeText
         #here we can determine what called us, and either start gathering all issues or just limited ones.
         #print ("n: " + str(n) + "--xmcnt" + str(xmlcnt))
-        if issue is not None and issue.isdigit():
+        if issue is not None and str(issue).isdigit():
             #this gets buggered up with NEW/ONGOING series because the db hasn't been updated
             #to reflect the proper count. Drop it by 1 to make sure.
             limiter = int(issue) - 1
diff --git a/mylar/webserve.py b/mylar/webserve.py
index 06eedbd9..efc484c7 100755
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -468,7 +468,8 @@ class WebInterface(object):
         raise cherrypy.HTTPRedirect("home")
     addArtists.exposed = True
 
-    def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None):
+    def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None):
+        print 'tada'
         now = datetime.datetime.now()
         myDB = db.DBConnection()
         #mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
@@ -479,6 +480,17 @@
             # we can limit the search by including the issue # and searching for
             # comics that have X many issues
             raise cherrypy.HTTPRedirect("searchit?name=%s&issue=%s&mode=%s" % (ComicName, 'None', 'pullseries'))
+        elif ComicID is None and mode == 'readlist':
+            # this is for marking individual comics from a readlist to be downloaded.
+            # Because there is no associated ComicID or IssueID, follow same pattern as in 'pullwant'
+            # except we know the Year
+            if ComicYear is None: ComicYear = SeriesYear
+            logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
+            foundcom = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, IssueDate=None, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None)
+            if foundcom == "yes":
+                logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")")
+            raise cherrypy.HTTPRedirect("readlist")
+
         elif ComicID is None and mode == 'pullwant':
             #this is for marking individual comics from the pullist to be downloaded.
             #because ComicID and IssueID will both be None due to pullist, it's probably
@@ -763,17 +775,20 @@
         myDB = db.DBConnection()
         readlist = myDB.select("SELECT * from readinglist group by StoryArcID COLLATE NOCASE")
         issuelist = myDB.select("SELECT * from readlist")
-        return serve_template(templatename="readinglist.html", title="Readlist", readlist=readlist, issuelist=issuelist)
+        readConfig = {
+                    "read2filename" : helpers.checked(mylar.READ2FILENAME)
+                    }
+        return serve_template(templatename="readinglist.html", title="Readlist", readlist=readlist, issuelist=issuelist,readConfig=readConfig)
         return page
     readlist.exposed = True
 
     def detailReadlist(self,StoryArcID, StoryArcName):
         myDB = db.DBConnection()
         readlist = myDB.select("SELECT * from readinglist WHERE StoryArcID=? order by ReadingOrder ASC", [StoryArcID])
-        return serve_template(templatename="readlist.html", title="Detailed Arc list", readlist=readlist, storyarcname=StoryArcName)
+        return serve_template(templatename="readlist.html", title="Detailed Arc list", readlist=readlist, storyarcname=StoryArcName, storyarcid=StoryArcID)
     detailReadlist.exposed = True
 
-    def removefromreadlist(self, IssueID=None, StoryArcID=None, IssueArcID=None):
+    def removefromreadlist(self, IssueID=None, StoryArcID=None, IssueArcID=None, AllRead=None):
         myDB = db.DBConnection()
         if IssueID:
             myDB.action('DELETE from readlist WHERE IssueID=?', [IssueID])
@@ -784,6 +799,9 @@
         elif IssueArcID:
             myDB.action('DELETE from readinglist WHERE IssueArcID=?', [IssueArcID])
             logger.info("Removed " + str(IssueArcID) + " from the Story Arc.")
+        elif AllRead:
+            myDB.action("DELETE from readlist WHERE Status='Read'")
+            logger.info("Removed All issues that have been marked as Read from Reading List")
     removefromreadlist.exposed = True
 
     def markasRead(self, IssueID=None, IssueArcID=None):
@@ -870,42 +888,78 @@
                        "TotalIssues": len(tracks)}
             myDB.upsert("readinglist", NewVals, CtrlVal)
             i+=1
-
+        raise cherrypy.HTTPRedirect("detailReadlist?StoryArcID=%s&StoryArcName=%s" % (storyarcid, storyarc))
     importReadlist.exposed = True
 
     #Story Arc Ascension...welcome to the next level :)
-    def ArcWatchlist(self):
+    def ArcWatchlist(self,StoryArcID=None):
         myDB = db.DBConnection()
-        ArcWatch = myDB.select("SELECT * FROM readinglist")
+        if StoryArcID:
+            ArcWatch = myDB.select("SELECT * FROM readinglist WHERE StoryArcID=?", [StoryArcID])
+        else:
+            ArcWatch = myDB.select("SELECT * FROM readinglist")
         if ArcWatch is None:
             logger.info("No Story Arcs to search")
         else:
             Comics = myDB.select("SELECT * FROM comics")
             arc_match = []
+            wantedlist = []
+
+            showonreadlist = 1 # 0 won't show storyarcissues on readinglist main page, 1 will show
+
             for arc in ArcWatch:
-                print ("arc: " + str(arc['ComicName']))
+                logger.fdebug("arc: " + arc['storyarc'] + " : " + arc['ComicName'] + " : " + arc['IssueNumber'])
                 #cycle through the story arcs here for matches on the watchlist
                 mod_arc = re.sub('[\:/,\'\/\-\&\%\$\#\@\!\*\+\.]', '', arc['ComicName'])
+                mod_arc = re.sub('\\bthe\\b', '', mod_arc.lower())
+                mod_arc = re.sub('\\band\\b', '', mod_arc.lower())
                 mod_arc = re.sub(r'\s', '', mod_arc)
+                matcheroso = "no"
                 for comic in Comics:
-                    print ("comic: " + comic['ComicName'])
+                    logger.fdebug("comic: " + comic['ComicName'])
                     mod_watch = re.sub('[\:\,\'\/\-\&\%\$\#\@\!\*\+\.]', '', comic['ComicName'])
+                    mod_watch = re.sub('\\bthe\\b', '', mod_watch.lower())
+                    mod_watch = re.sub('\\band\\b', '', mod_watch.lower())
                     mod_watch = re.sub(r'\s', '', mod_watch)
-                    if mod_watch == mod_arc and arc['SeriesYear'] == comic['SeriesYear']:
-                        #gather the matches now.
-                        arc_match.append({
-                            "match_name": arc['ComicName'],
-                            "match_id": comic['ComicID'],
-                            "match_issue": arc['IssueNumber'],
-                            "match_issuearcid": arc['IssueArcID']})
-                        logger.fdebu("arc_Match:" + arc_match)
-                        logger.fdebu("we matched on " + str(len(arc_match)) + " issues")
+                    if mod_watch == mod_arc:# and arc['SeriesYear'] == comic['ComicYear']:
+                        logger.fdebug("intial name match - confirming issue # is present in series")
+                        if comic['ComicID'][:1] == 'G':
+                            # if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
+                            GCDissue, whocares = helpers.decimal_issue(arc['IssueNumber'])
+                            GCDissue = int(GCDissue) / 1000
+                            logger.fdebug("issue converted to " + str(GCDissue))
+                            isschk = myDB.action("SELECT * FROM issues WHERE ComicName=? AND Issue_Number=?", [comic['ComicName'], str(GCDissue)]).fetchone()
+                        else:
+                            isschk = myDB.action("SELECT * FROM issues WHERE ComicName=? AND Issue_Number=?", [comic['ComicName'], arc['IssueNumber']]).fetchone()
+                        if isschk is None:
+                            logger.fdebug("we matched on name, but issue " + str(arc['IssueNumber']) + " doesn't exist for " + comic['ComicName'])
+                        else:
+                            logger.fdebug("issue #: " + str(arc['IssueNumber']) + " is present!")
+                            print isschk
+                            print ("Comicname: " + arc['ComicName'])
+                            #print ("ComicID: " + str(isschk['ComicID']))
+                            print ("Issue: " + arc['IssueNumber'])
+                            print ("IssueArcID: " + arc['IssueArcID'])
+                            #gather the matches now.
+                            arc_match.append({
+                                "match_name": arc['ComicName'],
+                                "match_id": isschk['ComicID'],
+                                "match_issue": arc['IssueNumber'],
+                                "match_issuearcid": arc['IssueArcID'],
+                                "match_seriesyear": comic['ComicYear']})
+                            matcheroso = "yes"
+                if matcheroso == "no":
+                    logger.fdebug("Unable to find a match for " + arc['ComicName'] + " :#" + str(arc['IssueNumber']))
+                    wantedlist.append({
+                        "ComicName": arc['ComicName'],
+                        "IssueNumber": arc['IssueNumber'],
+                        "IssueYear": arc['IssueYear']})
+
+            logger.fdebug("we matched on " + str(len(arc_match)) + " issues")
             for m_arc in arc_match:
-                print m_arc
                 #now we cycle through the issues looking for a match.
-                issue = myDB.action("SELECT * FROM issues where ComicID=? and Issue_Number=?", [m_arc['match_id'],m_arc['match_issue']])
+                issue = myDB.action("SELECT * FROM issues where ComicID=? and Issue_Number=?", [m_arc['match_id'],m_arc['match_issue']]).fetchone()
                 if issue is None: pass
                 else:
                     logger.fdebug("issue: " + str(issue['Issue_Number']) + "..." + str(m_arc['match_issue']))
@@ -913,27 +967,59 @@
                     if issue['Issue_Number'] == m_arc['match_issue']:
                         logger.fdebug("we matched on " + str(issue['Issue_Number']) + " for " + str(m_arc['match_name']))
                         if issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived':
-                            ctrlVal = {"IssueArcID": match_issuearcid }
-                            newVal = {"Status": issue['Status']}
+                            ctrlVal = {"IssueArcID": m_arc['match_issuearcid'] }
+                            newVal = {"Status": issue['Status'],
+                                      "IssueID": issue['IssueID']}
+                            if showonreadlist:
+                                showctrlVal = {"IssueID": issue['IssueID']}
+                                shownewVal = {"ComicName": issue['ComicName'],
+                                              "Issue_Number": issue['Issue_Number'],
+                                              "IssueDate": issue['IssueDate'],
+                                              "SeriesYear": m_arc['match_seriesyear'],
+                                              "ComicID": m_arc['match_id']}
+                                myDB.upsert("readlist", shownewVal, showctrlVal)
+
                             myDB.upsert("readinglist",newVal,ctrlVal)
-                            logger.info("Already have " + match_issuearcid)
-                            break
+                            logger.info("Already have " + issue['ComicName'] + " :# " + str(issue['Issue_Number']))
+                        else:
+                            logger.fdebug("We don't have " + issue['ComicName'] + " :# " + str(issue['Issue_Number']))
+                            ctrlVal = {"IssueArcID": m_arc['match_issuearcid'] }
+                            newVal = {"Status": "Wanted",
+                                      "IssueID": issue['IssueID']}
+                            myDB.upsert("readinglist",newVal,ctrlVal)
+                            logger.info("Marked " + issue['ComicName'] + " :# " + str(issue['Issue_Number']) + " as WANTED.")
+
+
     ArcWatchlist.exposed = True
 
+    def ReadMassCopy(self, StoryArcID, StoryArcName):
+        #this copies entire story arcs into the /cache/ folder
+        #alternatively, it will copy the issues individually directly to a 3rd party device (ie.tablet)
+
+        myDB = db.DBConnection()
+        copylist = myDB.select("SELECT * FROM readlist WHERE StoryArcID=? AND Status='Downloaded'", [StoryArcID])
+        if copylist is None:
+            logger.fdebug("You don't have any issues from " + StoryArcName + ". Aborting Mass Copy.")
+            return
+        else:
+            dst = os.path.join(mylar.CACHE, StoryArcName)
+            for files in copylist:
+
+                copyloc = files['Location']
+
+    ReadMassCopy.exposed = True
+
     def logs(self):
         if mylar.LOG_LEVEL is None or mylar.LOG_LEVEL == '':
-            mylar.LOG_LEVEL = 'info'
+            mylar.LOG_LEVEL = 'INFO'
         return serve_template(templatename="logs.html", title="Log", lineList=mylar.LOG_LIST, log_level=mylar.LOG_LEVEL)
     logs.exposed = True
 
-    def log_change(self, **args):
-        print ("here: " + str(args))
-        for loglevel in args:
-            if loglevel is None: continue
-            else:
-                print ("changing logger to " + str(loglevel))
-                LOGGER.setLevel(loglevel)
-        return serve_template(templatename="logs.html", title="Log", lineList=mylar.LOG_LIST)
+    def log_change(self, log_level):
+        if log_level is not None:
+            print ("changing logger to " + str(log_level))
+            LOGGER.setLevel(log_level)
+        return serve_template(templatename="logs.html", title="Log", lineList=mylar.LOG_LIST, log_level=log_level)
     log_change.exposed = True
 
     def clearhistory(self, type=None):
@@ -947,21 +1033,45 @@
         raise cherrypy.HTTPRedirect("history")
     clearhistory.exposed = True
 
-    def downloadLocal(self, IssueID):
-        #print ("issueid: " + str(IssueID))
+    def downloadLocal(self, IssueID=None, IssueArcID=None, ReadOrder=None):
+        print "tada"
+        print ("issueid: " + str(IssueID))
         myDB = db.DBConnection()
-        issueDL = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
-        comicid = issueDL['ComicID']
-        #print ("comicid: " + str(comicid))
-        comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
-        issueLOC = comic['ComicLocation']
-        #print ("IssueLOC: " + str(issueLOC))
-        issueFILE = issueDL['Location']
-        #print ("IssueFILE: "+ str(issueFILE))
-        issuePATH = os.path.join(issueLOC,issueFILE)
-        #print ("IssuePATH: " + str(issuePATH))
-        dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
-        #print ("dstPATH: " + str(dstPATH))
+        if IssueID:
+            issueDL = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
+            comicid = issueDL['ComicID']
+            #print ("comicid: " + str(comicid))
+            comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
+            #---issue info
+            comicname = comic['ComicName']
+            issuenum = issueDL['Issue_Number']
+            issuedate = issueDL['IssueDate']
+            seriesyear = comic['ComicYear']
+            #---
+            issueLOC = comic['ComicLocation']
+            #print ("IssueLOC: " + str(issueLOC))
+            issueFILE = issueDL['Location']
+            #print ("IssueFILE: "+ str(issueFILE))
+            issuePATH = os.path.join(issueLOC,issueFILE)
+            #print ("IssuePATH: " + str(issuePATH))
+            dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
+            #print ("dstPATH: " + str(dstPATH))
+        if IssueArcID:
+            if mylar.READ2FILENAME:
+                #if it's coming from a StoryArc, check to see if we're appending the ReadingOrder to the filename
+                ARCissueFILE = ReadOrder + "-" + issueFILE
+                dstPATH = os.path.join(mylar.CACHE_DIR, ARCissueFILE)
+#            issueDL = myDB.action("SELECT * FROM readinglist WHERE IssueArcID=?", [IssueArcID]).fetchone()
+#            storyarcid = issueDL['StoryArcID']
+#            #print ("comicid: " + str(comicid))
+#            issueLOC = mylar.DESTINATION_DIR
+#            #print ("IssueLOC: " + str(issueLOC))
+#            issueFILE = issueDL['Location']
+#            #print ("IssueFILE: "+ str(issueFILE))
+#            issuePATH = os.path.join(issueLOC,issueFILE)
+#            #print ("IssuePATH: " + str(issuePATH))
+#            dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
+#            #print ("dstPATH: " + str(dstPATH))
         try:
             shutil.copy2(issuePATH, dstPATH)
         except IOError as e:
@@ -974,7 +1084,11 @@ class WebInterface(object):
                             'Location': issueFILE}
         myDB.upsert("readlist", newValueDict, controlValueDict)
         myDB.upsert("issues", newValueDict, controlValueDict)
-
+        if IssueArcID:
+            controlValueD = {'IssueArcID': IssueArcID}
+            newValueDict = {'inCacheDIR': 'True',
+                            'Location': ARCissueFILE}
+            myDB.upsert("readinglist", newValueDict, controlValueD)
         #print("DB updated - Download link now enabled.")
     downloadLocal.exposed = True
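
For reference, the READ2FILENAME handling added to downloadLocal reduces to: when the request comes from a story arc and the option is enabled, prefix the arc's ReadingOrder onto the filename before copying the issue into the cache directory. A standalone sketch of that behaviour, with an illustrative function name and arguments rather than the patch's actual signature:

    import os
    import shutil

    def copy_issue_to_cache(issue_path, cache_dir, read_order=None, read2filename=False):
        # Copy one issue into the cache dir, optionally prefixing the story-arc
        # reading order so the copied files sort in reading order.
        filename = os.path.basename(issue_path)
        if read2filename and read_order is not None:
            # mirrors the patch's ReadOrder + "-" + issueFILE naming
            filename = str(read_order) + "-" + filename
        dst_path = os.path.join(cache_dir, filename)
        shutil.copy2(issue_path, dst_path)
        return dst_path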