diff --git a/data/interfaces/default/artistredone.html b/data/interfaces/default/artistredone.html
index 13198915..eba7cb5a 100755
--- a/data/interfaces/default/artistredone.html
+++ b/data/interfaces/default/artistredone.html
@@ -220,11 +220,12 @@ selected issues
+
-
+
diff --git a/mylar/__init__.py b/mylar/__init__.py
index efb72f85..f4bfdf8b 100755
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -115,6 +115,7 @@ AUTOWANT_UPCOMING = True
 AUTOWANT_ALL = False
 COMIC_COVER_LOCAL = False
 ADD_TO_CSV = True
+SKIPPED2WANTED = False
 
 SAB_HOST = None
 SAB_USERNAME = None
@@ -530,7 +531,6 @@ def config_write():
     new_config['General']['use_maxsize'] = int(USE_MAXSIZE)
     new_config['General']['maxsize'] = MAXSIZE
     new_config['General']['add_to_csv'] = int(ADD_TO_CSV)
-    new_config['General']['enable_extra_scripts'] = int(ENABLE_EXTRA_SCRIPTS)
     new_config['General']['extra_scripts'] = EXTRA_SCRIPTS
     new_config['General']['enable_pre_scripts'] = int(ENABLE_PRE_SCRIPTS)
diff --git a/mylar/search.py b/mylar/search.py
index 7af39860..df00f585 100755
--- a/mylar/search.py
+++ b/mylar/search.py
@@ -503,7 +503,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, nzbprov, nzbpr, Is
             logger.fdebug(str(n) + " Comparing: " + str(watchcomic_split[n]) + " .to. " + str(splitit[n]))
             if '+' in watchcomic_split[n]:
                 watchcomic_split[n] = re.sub('+', '', str(watchcomic_split[n]))
-            if str(watchcomic_split[n].lower()) in str(splitit[n].lower()):
+            if str(watchcomic_split[n].lower()) in str(splitit[n].lower()) and len(watchcomic_split[n]) >= len(splitit[n]):
                 logger.fdebug("word matched on : " + str(splitit[n]))
                 scount+=1
             #elif ':' in splitit[n] or '-' in splitit[n]:
@@ -607,6 +607,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, nzbprov, nzbpr, Is
                 urllib.urlretrieve(linkapi, str(mylar.BLACKHOLE_DIR) + str(filenamenzb))
                 logger.fdebug("filename saved to your blackhole as : " + str(filenamenzb))
                 logger.info(u"Successfully sent .nzb to your Blackhole directory : " + str(mylar.BLACKHOLE_DIR) + str(filenamenzb) )
+                nzbname = filenamenzb[:-4]
+                logger.fdebug("nzb name to be used for post-processing is : " + str(nzbname))
             #end blackhole
 
             else:
@@ -627,6 +629,17 @@
                 logger.fdebug("link to retrieve via api:" + str(linkapi))
 
+                #let's change all space to decimals for simplicity
+                nzbname = re.sub(" ", ".", str(entry['title']))
+                nzbname = re.sub('[\,\:]', '', str(nzbname))
+                extensions = ('.cbr', '.cbz')
+
+                if nzbname.lower().endswith(extensions):
+                    fd, ext = os.path.splitext(nzbname)
+                    logger.fdebug("Removed extension from nzb: " + ext)
+                    nzbname = re.sub(str(ext), '', str(nzbname))
+
+                logger.fdebug("nzbname used for post-processing:" + str(nzbname))
 
                 #we need to change the nzbx string now to allow for the nzbname rename.
                 if nzbprov == 'nzbx':
@@ -673,22 +686,6 @@
                #    logger.info(u"Removed temporary save file")
                #raise an exception to break out of loop
 
-                #let's change all space to decimals for simplicity
-                if mylar.BLACKHOLE:
-                    bhole_cname = re.sub('[/:/,\/]', '', str(ComicName))
-                    nzbname = str(re.sub(" ", ".", str(bhole_cname))) + "." + str(IssueNumber) + ".(" + str(comyear) + ")"
-                else:
-                    nzbname = re.sub(" ", ".", str(entry['title']))
-                nzbname = re.sub('[\,\:]', '', str(nzbname))
-                extensions = ('.cbr', '.cbz')
-
-                if nzbname.lower().endswith(extensions):
-                    fd, ext = os.path.splitext(nzbname)
-                    logger.fdebug("Removed extension from nzb: " + ext)
-                    nzbname = re.sub(str(ext), '', str(nzbname))
-
-
-                logger.fdebug("nzbname used for post-processing:" + str(nzbname))
 
                 foundc = "yes"
                 done = True
diff --git a/mylar/webserve.py b/mylar/webserve.py
index ec837306..582bbb18 100755
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -70,12 +70,14 @@ class WebInterface(object):
         if comic is None:
             raise cherrypy.HTTPRedirect("home")
         usethefuzzy = comic['UseFuzzy']
+        skipped2wanted = "0"
         if usethefuzzy is None:
             usethefuzzy = "0"
         comicConfig = { "comiclocation" : mylar.COMIC_LOCATION,
                     "fuzzy_year0" : helpers.radio(int(usethefuzzy), 0),
                     "fuzzy_year1" : helpers.radio(int(usethefuzzy), 1),
-                    "fuzzy_year2" : helpers.radio(int(usethefuzzy), 2)
+                    "fuzzy_year2" : helpers.radio(int(usethefuzzy), 2),
+                    "skipped2wanted" : helpers.checked(skipped2wanted)
                 }
         return serve_template(templatename="artistredone.html", title=comic['ComicName'], comic=comic, issues=issues, comicConfig=comicConfig)
     artistPage.exposed = True
@@ -444,6 +446,25 @@
         return serve_template(templatename="upcoming.html", title="Upcoming", upcoming=upcoming, issues=issues)
     upcoming.exposed = True
 
+    def skipped2wanted(self, comicid):
+        # change all issues for a given ComicID that are Skipped, into Wanted.
+        issuestowanted = []
+        issuesnumwant = []
+        myDB = db.DBConnection()
+        skipped2 = myDB.select("SELECT * from issues WHERE ComicID=? AND Status='Skipped'", [comicid])
+        for skippy in skipped2:
+            mvcontroldict = {"IssueID": skippy['IssueID']}
+            mvvalues = {"Status": "Wanted"}
+            #print ("Changing issue " + str(skippy['Issue_Number']) + " to Wanted.")
+            myDB.upsert("issues", mvvalues, mvcontroldict)
+            issuestowanted.append(skippy['IssueID'])
+            issuesnumwant.append(skippy['Issue_Number'])
+        if len(issuestowanted) > 0 :
+            logger.info("Marking issues: %s as Wanted" % issuesnumwant)
+            threading.Thread(target=search.searchIssueIDList, args=[issuestowanted]).start()
+        raise cherrypy.HTTPRedirect("artistPage?ComicID=%s" % [comicid])
+    skipped2wanted.exposed = True
+
     def searchScan(self, name):
         return serve_template(templatename="searchfix.html", title="Manage", name=name)
     searchScan.exposed = True
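Note: the block added in the mylar/search.py hunk above normalizes the NZB title before it is handed to post-processing: spaces become dots, commas and colons are dropped, and a trailing .cbr/.cbz extension is removed. The following is a minimal standalone sketch of that behaviour; the helper name normalize_nzbname and the sample title are illustrative only, and it uses os.path.splitext to drop the extension where the patch re-subs the extension string.

import os
import re

def normalize_nzbname(title):
    # Sketch of the nzbname cleanup introduced in search.py (illustrative helper,
    # not part of the patch).
    nzbname = re.sub(" ", ".", str(title))        # spaces -> dots for simplicity
    nzbname = re.sub(r'[,:]', '', nzbname)        # drop commas and colons
    extensions = ('.cbr', '.cbz')
    if nzbname.lower().endswith(extensions):
        nzbname, ext = os.path.splitext(nzbname)  # strip the comic-archive extension
    return nzbname

print(normalize_nzbname("Comic Name 001 (2013).cbz"))   # Comic.Name.001.(2013)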