#  This file is part of Mylar.
#
#  Mylar is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  Mylar is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with Mylar.  If not, see <http://www.gnu.org/licenses/>.
import os
import cherrypy
2012-12-31 16:52:16 +00:00
import datetime
2013-01-06 08:51:44 +00:00
import re
2012-09-13 15:27:34 +00:00
from mako . template import Template
from mako . lookup import TemplateLookup
from mako import exceptions
import time
import threading
2013-01-11 21:20:51 +00:00
import csv
import platform
2012-09-13 15:27:34 +00:00
import mylar
2013-01-11 21:20:51 +00:00
from mylar import logger , db , importer , mb , search , filechecker , helpers , updater , parseit , weeklypull , PostProcessor , version
2012-09-13 15:27:34 +00:00
#from mylar.helpers import checked, radio, today
import lib . simplejson as simplejson
from operator import itemgetter
def serve_template(templatename, **kwargs):
    """Render a Mako template from the active interface directory.

    Templates are looked up under data/interfaces/<mylar.INTERFACE>.  On
    any rendering failure an HTML-formatted Mako traceback page is
    returned instead, so template bugs show up in the browser rather
    than as an opaque 500.
    """
    interface_dir = os.path.join(str(mylar.PROG_DIR), 'data/interfaces/')
    template_dir = os.path.join(str(interface_dir), mylar.INTERFACE)
    _hplookup = TemplateLookup(directories=[template_dir])
    try:
        template = _hplookup.get_template(templatename)
        return template.render(**kwargs)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed while rendering.
        return exceptions.html_error_template().render()
class WebInterface ( object ) :
def index ( self ) :
raise cherrypy . HTTPRedirect ( " home " )
index . exposed = True
def home ( self ) :
myDB = db . DBConnection ( )
comics = myDB . select ( ' SELECT * from comics order by ComicSortName COLLATE NOCASE ' )
return serve_template ( templatename = " index.html " , title = " Home " , comics = comics )
home . exposed = True
def artistPage ( self , ComicID ) :
myDB = db . DBConnection ( )
comic = myDB . action ( ' SELECT * FROM comics WHERE ComicID=? ' , [ ComicID ] ) . fetchone ( )
issues = myDB . select ( ' SELECT * from issues WHERE ComicID=? order by Int_IssueNumber DESC ' , [ ComicID ] )
if comic is None :
raise cherrypy . HTTPRedirect ( " home " )
2012-09-14 17:29:01 +00:00
comicConfig = {
2013-01-13 15:59:46 +00:00
" comiclocation " : mylar . COMIC_LOCATION ,
" use_fuzzy " : comic [ ' UseFuzzy ' ]
2012-09-14 17:29:01 +00:00
}
return serve_template ( templatename = " artistredone.html " , title = comic [ ' ComicName ' ] , comic = comic , issues = issues , comicConfig = comicConfig )
2012-09-13 15:27:34 +00:00
artistPage . exposed = True
def searchit ( self , name , issue = None , mode = None ) :
type = ' comic ' # let's default this to comic search only for the time being (will add story arc, characters, etc later)
#mode dictates type of search:
# --series ... search for comicname displaying all results
# --pullseries ... search for comicname displaying a limited # of results based on issue
# --want ... individual comics
if mode is None : mode = ' series '
if len ( name ) == 0 :
raise cherrypy . HTTPRedirect ( " home " )
if type == ' comic ' and mode == ' pullseries ' :
searchresults = mb . findComic ( name , mode , issue = issue )
elif type == ' comic ' and mode == ' series ' :
searchresults = mb . findComic ( name , mode , issue = None )
elif type == ' comic ' and mode == ' want ' :
searchresults = mb . findComic ( name , mode , issue )
searchresults = sorted ( searchresults , key = itemgetter ( ' comicyear ' , ' issues ' ) , reverse = True )
#print ("Results: " + str(searchresults))
return serve_template ( templatename = " searchresults.html " , title = ' Search Results for: " ' + name + ' " ' , searchresults = searchresults , type = type )
searchit . exposed = True
2013-01-11 21:20:51 +00:00
def addComic ( self , comicid , comicname = None , comicyear = None , comicimage = None , comicissues = None , comicpublisher = None ) :
2012-10-16 08:16:29 +00:00
myDB = db . DBConnection ( )
sresults = [ ]
2013-01-11 21:20:51 +00:00
cresults = [ ]
2012-10-16 08:16:29 +00:00
mismatch = " no "
2013-01-11 21:20:51 +00:00
print ( " comicid: " + str ( comicid ) )
print ( " comicname: " + str ( comicname ) )
print ( " comicyear: " + str ( comicyear ) )
print ( " comicissues: " + str ( comicissues ) )
print ( " comicimage: " + str ( comicimage ) )
2012-10-16 08:16:29 +00:00
#here we test for exception matches (ie. comics spanning more than one volume, known mismatches, etc).
CV_EXcomicid = myDB . action ( " SELECT * from exceptions WHERE ComicID=? " , [ comicid ] ) . fetchone ( )
2013-01-11 21:20:51 +00:00
if CV_EXcomicid is None : # pass #
gcdinfo = parseit . GCDScraper ( comicname , comicyear , comicissues , comicid )
if gcdinfo == " No Match " :
2013-01-13 15:59:46 +00:00
#when it no matches, the image will always be blank...let's fix it.
cvdata = mylar . cv . getComic ( comicid , ' comic ' )
comicimage = cvdata [ ' ComicImage ' ]
2013-01-11 21:20:51 +00:00
updater . no_searchresults ( comicid )
nomatch = " true "
logger . info ( u " I couldn ' t find an exact match for " + str ( comicname ) + " ( " + str ( comicyear ) + " ) - gathering data for Error-Checking screen (this could take a minute)... " )
i = 0
loopie , cnt = parseit . ComChk ( comicname , comicyear , comicpublisher , comicissues , comicid )
print ( " total count : " + str ( cnt ) )
while ( i < cnt ) :
try :
stoopie = loopie [ ' comchkchoice ' ] [ i ]
except ( IndexError , TypeError ) :
break
cresults . append ( {
' ComicID ' : stoopie [ ' ComicID ' ] ,
' ComicName ' : stoopie [ ' ComicName ' ] ,
' ComicYear ' : stoopie [ ' ComicYear ' ] ,
' ComicIssues ' : stoopie [ ' ComicIssues ' ] ,
' ComicURL ' : stoopie [ ' ComicURL ' ] ,
' ComicPublisher ' : stoopie [ ' ComicPublisher ' ] ,
' GCDID ' : stoopie [ ' GCDID ' ]
} )
i + = 1
return serve_template ( templatename = " searchfix.html " , title = " Error Check " , comicname = comicname , comicid = comicid , comicyear = comicyear , comicimage = comicimage , comicissues = comicissues , cresults = cresults )
else :
nomatch = " false "
logger . info ( u " Quick match success..continuing. " )
2012-10-16 08:16:29 +00:00
else :
if CV_EXcomicid [ ' variloop ' ] == ' 99 ' :
logger . info ( u " mismatched name...autocorrecting to correct GID and auto-adding. " )
mismatch = " yes "
if CV_EXcomicid [ ' NewComicID ' ] == ' none ' :
logger . info ( u " multi-volume series detected " )
testspx = CV_EXcomicid [ ' GComicID ' ] . split ( ' / ' )
for exc in testspx :
fakeit = parseit . GCDAdd ( testspx )
howmany = int ( CV_EXcomicid [ ' variloop ' ] )
t = 0
while ( t < = howmany ) :
try :
sres = fakeit [ ' serieschoice ' ] [ t ]
except IndexError :
break
sresults . append ( {
' ComicID ' : sres [ ' ComicID ' ] ,
' ComicName ' : sres [ ' ComicName ' ] ,
' ComicYear ' : sres [ ' ComicYear ' ] ,
' ComicIssues ' : sres [ ' ComicIssues ' ] ,
' ComicPublisher ' : sres [ ' ComicPublisher ' ] ,
' ComicCover ' : sres [ ' ComicCover ' ]
} )
t + = 1
#searchfix(-1).html is for misnamed comics and wrong years.
#searchfix-2.html is for comics that span multiple volumes.
return serve_template ( templatename = " searchfix-2.html " , title = " In-Depth Results " , sresults = sresults )
threading . Thread ( target = importer . addComictoDB , args = [ comicid , mismatch ] ) . start ( )
2012-09-13 15:27:34 +00:00
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % comicid )
addComic . exposed = True
2012-10-16 08:16:29 +00:00
2013-01-11 21:20:51 +00:00
def from_Exceptions ( self , comicid , gcdid , comicname = None , comicyear = None , comicissues = None , comicpublisher = None ) :
mismatch = " yes "
print ( " gcdid: " + str ( gcdid ) )
#write it to the custom_exceptions.csv and reload it so that importer will pick it up and do it's thing :)
#custom_exceptions in this format...
#99, (comicid), (gcdid), none
logger . info ( " saving new information into custom_exceptions.csv... " )
except_info = " none # " + str ( comicname ) + " -( " + str ( comicyear ) + " ) "
with open ( ' custom_exceptions.csv ' , ' a ' ) as f :
f . write ( ' %s , %s , %s , %s \n ' % ( " 99 " , str ( comicid ) , str ( gcdid ) , str ( except_info ) ) )
logger . info ( " re-loading csv file so it ' s all nice and current. " )
mylar . csv_load ( )
threading . Thread ( target = importer . addComictoDB , args = [ comicid , mismatch ] ) . start ( )
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % comicid )
from_Exceptions . exposed = True
2012-10-16 08:16:29 +00:00
def GCDaddComic ( self , comicid , comicname = None , comicyear = None , comicissues = None , comiccover = None , comicpublisher = None ) :
#since we already know most of the info, let's add it to the db so we can reference it later.
myDB = db . DBConnection ( )
gcomicid = " G " + str ( comicid )
comicyear_len = comicyear . find ( ' ' , 2 )
comyear = comicyear [ comicyear_len + 1 : comicyear_len + 5 ]
controlValueDict = { ' ComicID ' : gcomicid }
newValueDict = { ' ComicName ' : comicname ,
' ComicYear ' : comyear ,
' ComicPublished ' : comicyear ,
' ComicPublisher ' : comicpublisher ,
' ComicImage ' : comiccover ,
' Total ' : comicissues }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
threading . Thread ( target = importer . GCDimport , args = [ gcomicid ] ) . start ( )
2012-10-16 15:12:44 +00:00
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % gcomicid )
2012-10-16 08:16:29 +00:00
GCDaddComic . exposed = True
2012-10-30 10:43:01 +00:00
def post_process ( self , nzb_name , nzb_folder ) :
logger . info ( u " Starting postprocessing for : " + str ( nzb_name ) )
2012-12-27 15:04:03 +00:00
PostProcess = PostProcessor . PostProcessor ( nzb_name , nzb_folder )
result = PostProcess . Process ( )
2012-10-30 10:43:01 +00:00
#result = post_results.replace("\n","<br />\n")
return result
#log2screen = threading.Thread(target=PostProcessor.PostProcess, args=[nzb_name,nzb_folder]).start()
#return serve_template(templatename="postprocess.html", title="postprocess")
#raise cherrypy.HTTPRedirect("artistPage?ComicID=%s" % comicid)
post_process . exposed = True
2012-09-13 15:27:34 +00:00
def pauseArtist ( self , ComicID ) :
logger . info ( u " Pausing comic: " + ComicID )
myDB = db . DBConnection ( )
controlValueDict = { ' ComicID ' : ComicID }
newValueDict = { ' Status ' : ' Paused ' }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
pauseArtist . exposed = True
def resumeArtist ( self , ComicID ) :
logger . info ( u " Resuming comic: " + ComicID )
myDB = db . DBConnection ( )
controlValueDict = { ' ComicID ' : ComicID }
newValueDict = { ' Status ' : ' Active ' }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
resumeArtist . exposed = True
def deleteArtist ( self , ComicID ) :
myDB = db . DBConnection ( )
comic = myDB . action ( ' SELECT * from comics WHERE ComicID=? ' , [ ComicID ] ) . fetchone ( )
2012-09-24 05:17:29 +00:00
if comic [ ' ComicName ' ] is None : ComicName = " None "
else : ComicName = comic [ ' ComicName ' ]
logger . info ( u " Deleting all traces of Comic: " + str ( ComicName ) )
2012-09-13 15:27:34 +00:00
myDB . action ( ' DELETE from comics WHERE ComicID=? ' , [ ComicID ] )
myDB . action ( ' DELETE from issues WHERE ComicID=? ' , [ ComicID ] )
raise cherrypy . HTTPRedirect ( " home " )
deleteArtist . exposed = True
def refreshArtist ( self , ComicID ) :
2012-10-16 08:16:29 +00:00
myDB = db . DBConnection ( )
mismatch = " no "
CV_EXcomicid = myDB . action ( " SELECT * from exceptions WHERE ComicID=? " , [ ComicID ] ) . fetchone ( )
if CV_EXcomicid is None : pass
else :
if CV_EXcomicid [ ' variloop ' ] == ' 99 ' :
mismatch = " yes "
if ComicID [ : 1 ] == " G " : threading . Thread ( target = importer . GCDimport , args = [ ComicID ] ) . start ( )
else : threading . Thread ( target = importer . addComictoDB , args = [ ComicID , mismatch ] ) . start ( )
2012-09-13 15:27:34 +00:00
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
refreshArtist . exposed = True
def editIssue ( self , ComicID ) :
myDB = db . DBConnection ( )
comic = myDB . action ( ' SELECT * from comics WHERE ComicID=? ' , [ ComicID ] ) . fetchone ( )
title = ' Now Editing ' + comic [ ' ComicName ' ]
return serve_template ( templatename = " editcomic.html " , title = title , comic = comic )
#raise cherrypy.HTTPRedirect("artistPage?ComicID=%s" & ComicID)
editIssue . exposed = True
2012-10-16 08:16:29 +00:00
def markissues ( self , action = None , * * args ) :
2012-09-13 15:27:34 +00:00
myDB = db . DBConnection ( )
2012-10-16 08:16:29 +00:00
issuesToAdd = [ ]
2012-10-30 10:43:01 +00:00
issuestoArchive = [ ]
2012-09-13 15:27:34 +00:00
if action == ' WantedNew ' :
newaction = ' Wanted '
else :
newaction = action
for IssueID in args :
2012-10-16 15:53:46 +00:00
if IssueID is None : continue
2012-09-13 15:27:34 +00:00
else :
2012-10-16 08:16:29 +00:00
mi = myDB . action ( " SELECT * FROM issues WHERE IssueID=? " , [ IssueID ] ) . fetchone ( )
miyr = myDB . action ( " SELECT ComicYear FROM comics WHERE ComicID=? " , [ mi [ ' ComicID ' ] ] ) . fetchone ( )
2012-10-30 10:43:01 +00:00
if action == ' Downloaded ' :
if mi [ ' Status ' ] == " Skipped " or mi [ ' Status ' ] == " Wanted " :
logger . info ( u " Cannot change status to %s as comic is not Snatched or Downloaded " % ( newaction ) )
continue
elif action == ' Archived ' :
logger . info ( u " Marking %s %s as %s " % ( mi [ ' ComicName ' ] , mi [ ' Issue_Number ' ] , newaction ) )
#updater.forceRescan(mi['ComicID'])
issuestoArchive . append ( IssueID )
elif action == ' Wanted ' :
logger . info ( u " Marking %s %s as %s " % ( mi [ ' ComicName ' ] , mi [ ' Issue_Number ' ] , newaction ) )
issuesToAdd . append ( IssueID )
2012-10-16 08:16:29 +00:00
controlValueDict = { " IssueID " : IssueID }
newValueDict = { " Status " : newaction }
myDB . upsert ( " issues " , newValueDict , controlValueDict )
2012-10-30 10:43:01 +00:00
if len ( issuestoArchive ) > 0 :
updater . forceRescan ( mi [ ' ComicID ' ] )
2012-10-16 08:16:29 +00:00
if len ( issuesToAdd ) > 0 :
2012-10-30 10:43:01 +00:00
logger . debug ( " Marking issues: %s as Wanted " % issuesToAdd )
2012-10-16 08:16:29 +00:00
threading . Thread ( target = search . searchIssueIDList , args = [ issuesToAdd ] ) . start ( )
2012-10-16 15:53:46 +00:00
#if IssueID:
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % mi [ ' ComicID ' ] )
#else:
# raise cherrypy.HTTPRedirect("upcoming")
2012-09-13 15:27:34 +00:00
markissues . exposed = True
def addArtists ( self , * * args ) :
threading . Thread ( target = importer . artistlist_to_mbids , args = [ args , True ] ) . start ( )
raise cherrypy . HTTPRedirect ( " home " )
addArtists . exposed = True
2012-10-01 15:01:21 +00:00
def queueissue ( self , mode , ComicName = None , ComicID = None , ComicYear = None , ComicIssue = None , IssueID = None , new = False , redirect = None ) :
2012-12-31 16:52:16 +00:00
now = datetime . datetime . now ( )
2012-09-19 04:38:25 +00:00
myDB = db . DBConnection ( )
2012-09-13 15:27:34 +00:00
#mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
if ComicID is None and mode == ' series ' :
issue = None
raise cherrypy . HTTPRedirect ( " searchit?name= %s &issue= %s &mode= %s " % ( ComicName , ' None ' , ' series ' ) )
elif ComicID is None and mode == ' pullseries ' :
# we can limit the search by including the issue # and searching for
# comics that have X many issues
raise cherrypy . HTTPRedirect ( " searchit?name= %s &issue= %s &mode= %s " % ( ComicName , ' None ' , ' pullseries ' ) )
elif ComicID is None and mode == ' pullwant ' :
#this is for marking individual comics from the pullist to be downloaded.
#because ComicID and IssueID will both be None due to pullist, it's probably
#better to set both to some generic #, and then filter out later...
cyear = myDB . action ( " SELECT SHIPDATE FROM weekly " ) . fetchone ( )
ComicYear = str ( cyear [ ' SHIPDATE ' ] ) [ : 4 ]
2012-12-31 16:52:16 +00:00
if ComicYear == ' ' : ComicYear = now . year
2012-09-13 15:27:34 +00:00
logger . info ( u " Marking " + ComicName + " " + ComicIssue + " as wanted... " )
2012-10-30 10:43:01 +00:00
foundcom = search . search_init ( ComicName = ComicName , IssueNumber = ComicIssue , ComicYear = ComicYear , SeriesYear = None , IssueDate = cyear [ ' SHIPDATE ' ] , IssueID = IssueID )
2012-09-13 15:27:34 +00:00
if foundcom == " yes " :
logger . info ( u " Downloaded " + ComicName + " " + ComicIssue )
return
elif mode == ' want ' :
2012-10-01 15:01:21 +00:00
cdname = myDB . action ( " SELECT ComicName from comics where ComicID=? " , [ ComicID ] ) . fetchone ( )
ComicName = cdname [ ' ComicName ' ]
2012-09-13 15:27:34 +00:00
logger . info ( u " Marking " + ComicName + " issue: " + ComicIssue + " as wanted... " )
#---
#this should be on it's own somewhere
if IssueID is not None :
controlValueDict = { " IssueID " : IssueID }
newStatus = { " Status " : " Wanted " }
myDB . upsert ( " issues " , newStatus , controlValueDict )
#for future reference, the year should default to current year (.datetime)
2012-10-09 06:33:14 +00:00
issues = myDB . action ( " SELECT IssueDate FROM issues WHERE IssueID=? " , [ IssueID ] ) . fetchone ( )
2012-09-13 15:27:34 +00:00
if ComicYear == None :
ComicYear = str ( issues [ ' IssueDate ' ] ) [ : 4 ]
2012-12-31 16:52:16 +00:00
miy = myDB . action ( " SELECT * FROM comics WHERE ComicID=? " , [ ComicID ] ) . fetchone ( )
SeriesYear = miy [ ' ComicYear ' ]
AlternateSearch = miy [ ' AlternateSearch ' ]
foundcom = search . search_init ( ComicName , ComicIssue , ComicYear , SeriesYear , issues [ ' IssueDate ' ] , IssueID , AlternateSearch )
2012-09-13 15:27:34 +00:00
if foundcom == " yes " :
# file check to see if issue exists and update 'have' count
if IssueID is not None :
return updater . foundsearch ( ComicID , IssueID )
if ComicID :
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
else :
raise cherrypy . HTTPRedirect ( redirect )
queueissue . exposed = True
def unqueueissue ( self , IssueID , ComicID ) :
myDB = db . DBConnection ( )
issue = myDB . action ( ' SELECT * FROM issues WHERE IssueID=? ' , [ IssueID ] ) . fetchone ( )
logger . info ( u " Marking " + issue [ ' ComicName ' ] + " issue # " + issue [ ' Issue_Number ' ] + " as skipped... " )
controlValueDict = { ' IssueID ' : IssueID }
newValueDict = { ' Status ' : ' Skipped ' }
myDB . upsert ( " issues " , newValueDict , controlValueDict )
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
unqueueissue . exposed = True
def pullist ( self ) :
myDB = db . DBConnection ( )
popit = myDB . select ( " SELECT * FROM sqlite_master WHERE name= ' weekly ' and type= ' table ' " )
if popit :
weeklyresults = myDB . select ( " SELECT * from weekly " )
pulldate = myDB . action ( " SELECT * from weekly " ) . fetchone ( )
2012-12-16 18:41:01 +00:00
if pulldate is None :
return self . manualpull ( )
#raise cherrypy.HTTPRedirect("home")
2012-09-13 15:27:34 +00:00
else :
return self . manualpull ( )
return serve_template ( templatename = " weeklypull.html " , title = " Weekly Pull " , weeklyresults = weeklyresults , pulldate = pulldate [ ' SHIPDATE ' ] , pullfilter = False )
pullist . exposed = True
def filterpull ( self ) :
myDB = db . DBConnection ( )
weeklyresults = myDB . select ( " SELECT * from weekly " )
pulldate = myDB . action ( " SELECT * from weekly " ) . fetchone ( )
if pulldate is None :
raise cherrypy . HTTPRedirect ( " home " )
return serve_template ( templatename = " weeklypull.html " , title = " Weekly Pull " , weeklyresults = weeklyresults , pulldate = pulldate [ ' SHIPDATE ' ] , pullfilter = True )
filterpull . exposed = True
def manualpull ( self ) :
from mylar import weeklypull
threading . Thread ( target = weeklypull . pullit ( ) ) . start ( )
raise cherrypy . HTTPRedirect ( " pullist " )
manualpull . exposed = True
def upcoming ( self ) :
myDB = db . DBConnection ( )
#upcoming = myDB.select("SELECT * from issues WHERE ReleaseDate > date('now') order by ReleaseDate DESC")
upcoming = myDB . select ( " SELECT * from upcoming WHERE IssueDate > date( ' now ' ) order by IssueDate DESC " )
issues = myDB . select ( " SELECT * from issues WHERE Status= ' Wanted ' " )
#let's move any items from the upcoming table into the wanted table if the date has already passed.
2012-10-16 08:16:29 +00:00
#gather the list...
mvupcome = myDB . select ( " SELECT * from upcoming WHERE IssueDate < date( ' now ' ) order by IssueDate DESC " )
#get the issue ID's
for mvup in mvupcome :
2012-10-16 15:12:44 +00:00
myissue = myDB . action ( " SELECT * FROM issues WHERE Issue_Number=? " , [ mvup [ ' IssueNumber ' ] ] ) . fetchone ( )
2012-10-16 08:16:29 +00:00
if myissue is None : pass
else :
2012-10-16 15:12:44 +00:00
#print ("ComicName: " + str(myissue['ComicName']))
#print ("Issue number : " + str(myissue['Issue_Number']) )
2012-10-16 08:16:29 +00:00
mvcontroldict = { " IssueID " : myissue [ ' IssueID ' ] }
2012-10-16 15:12:44 +00:00
mvvalues = { " ComicID " : myissue [ ' ComicID ' ] ,
2012-10-16 08:16:29 +00:00
" Status " : " Wanted " }
2012-10-18 07:08:43 +00:00
myDB . upsert ( " issues " , mvvalues , mvcontroldict )
2012-10-16 08:16:29 +00:00
2012-10-21 15:30:26 +00:00
#remove old entry from upcoming so it won't try to continually download again.
deleteit = myDB . action ( " DELETE from upcoming WHERE ComicName=? AND IssueNumber=? " , [ mvup [ ' ComicName ' ] , mvup [ ' IssueNumber ' ] ] )
2012-09-13 15:27:34 +00:00
return serve_template ( templatename = " upcoming.html " , title = " Upcoming " , upcoming = upcoming , issues = issues )
upcoming . exposed = True
2012-09-24 05:17:29 +00:00
def searchScan ( self , name ) :
return serve_template ( templatename = " searchfix.html " , title = " Manage " , name = name )
searchScan . exposed = True
2012-09-13 15:27:34 +00:00
def manage ( self ) :
return serve_template ( templatename = " manage.html " , title = " Manage " )
manage . exposed = True
2012-09-18 13:13:42 +00:00
def manageComics ( self ) :
2012-09-13 15:27:34 +00:00
myDB = db . DBConnection ( )
comics = myDB . select ( ' SELECT * from comics order by ComicSortName COLLATE NOCASE ' )
2012-09-18 13:13:42 +00:00
return serve_template ( templatename = " managecomics.html " , title = " Manage Comics " , comics = comics )
manageComics . exposed = True
2012-09-13 15:27:34 +00:00
2012-09-18 04:00:43 +00:00
def manageIssues ( self ) :
2012-09-13 15:27:34 +00:00
myDB = db . DBConnection ( )
issues = myDB . select ( ' SELECT * from issues ' )
2012-09-24 05:17:29 +00:00
return serve_template ( templatename = " manageissues.html " , title = " Manage Issues " , issues = issues )
2012-09-18 13:13:42 +00:00
manageIssues . exposed = True
2012-09-13 15:27:34 +00:00
def manageNew ( self ) :
myDB = db . DBConnection ( )
newcomics = myDB . select ( ' SELECT * from newartists ' )
return serve_template ( templatename = " managenew.html " , title = " Manage New Artists " , newcomics = newcomics )
manageNew . exposed = True
2012-09-18 04:00:43 +00:00
def markComics ( self , action = None , * * args ) :
2012-09-13 15:27:34 +00:00
myDB = db . DBConnection ( )
2012-09-18 04:00:43 +00:00
comicsToAdd = [ ]
for ComicID in args :
2012-09-13 15:27:34 +00:00
if action == ' delete ' :
2012-09-18 04:00:43 +00:00
myDB . action ( ' DELETE from comics WHERE ComicID=? ' , [ ComicID ] )
myDB . action ( ' DELETE from issues WHERE ComicID=? ' , [ ComicID ] )
2012-09-13 15:27:34 +00:00
elif action == ' pause ' :
2012-09-18 04:00:43 +00:00
controlValueDict = { ' ComicID ' : ComicID }
2012-09-13 15:27:34 +00:00
newValueDict = { ' Status ' : ' Paused ' }
2012-09-18 04:00:43 +00:00
myDB . upsert ( " comics " , newValueDict , controlValueDict )
2012-09-13 15:27:34 +00:00
elif action == ' resume ' :
2012-09-18 04:00:43 +00:00
controlValueDict = { ' ComicID ' : ComicID }
2012-09-13 15:27:34 +00:00
newValueDict = { ' Status ' : ' Active ' }
2012-09-18 04:00:43 +00:00
myDB . upsert ( " comics " , newValueDict , controlValueDict )
2012-09-13 15:27:34 +00:00
else :
2012-09-18 04:00:43 +00:00
comicsToAdd . append ( ComicID )
if len ( comicsToAdd ) > 0 :
logger . debug ( " Refreshing comics: %s " % comicsToAdd )
threading . Thread ( target = importer . addComicIDListToDB , args = [ comicsToAdd ] ) . start ( )
2012-09-13 15:27:34 +00:00
raise cherrypy . HTTPRedirect ( " home " )
2012-09-18 04:00:43 +00:00
markComics . exposed = True
2012-09-13 15:27:34 +00:00
def forceUpdate ( self ) :
from mylar import updater
threading . Thread ( target = updater . dbUpdate ) . start ( )
raise cherrypy . HTTPRedirect ( " home " )
forceUpdate . exposed = True
def forceSearch ( self ) :
from mylar import search
threading . Thread ( target = search . searchforissue ) . start ( )
raise cherrypy . HTTPRedirect ( " home " )
forceSearch . exposed = True
def forceRescan ( self , ComicID ) :
threading . Thread ( target = updater . forceRescan , args = [ ComicID ] ) . start ( )
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
forceRescan . exposed = True
def checkGithub ( self ) :
from mylar import versioncheck
versioncheck . checkGithub ( )
raise cherrypy . HTTPRedirect ( " home " )
checkGithub . exposed = True
def history ( self ) :
myDB = db . DBConnection ( )
history = myDB . select ( ''' SELECT * from snatched order by DateAdded DESC ''' )
return serve_template ( templatename = " history.html " , title = " History " , history = history )
return page
history . exposed = True
def logs ( self ) :
return serve_template ( templatename = " logs.html " , title = " Log " , lineList = mylar . LOG_LIST )
logs . exposed = True
def clearhistory ( self , type = None ) :
myDB = db . DBConnection ( )
if type == ' all ' :
logger . info ( u " Clearing all history " )
myDB . action ( ' DELETE from snatched ' )
else :
logger . info ( u " Clearing history where status is %s " % type )
myDB . action ( ' DELETE from snatched WHERE Status=? ' , [ type ] )
raise cherrypy . HTTPRedirect ( " history " )
clearhistory . exposed = True
def config ( self ) :
interface_dir = os . path . join ( mylar . PROG_DIR , ' data/interfaces/ ' )
interface_list = [ name for name in os . listdir ( interface_dir ) if os . path . isdir ( os . path . join ( interface_dir , name ) ) ]
2013-01-13 15:59:46 +00:00
# branch_history, err = mylar.versioncheck.runGit("log --oneline --pretty=format:'%h - %ar - %s' -n 4")
# br_hist = branch_history.replace("\n", "<br />\n")
2013-01-11 21:20:51 +00:00
2012-09-13 15:27:34 +00:00
config = {
" http_host " : mylar . HTTP_HOST ,
" http_user " : mylar . HTTP_USERNAME ,
" http_port " : mylar . HTTP_PORT ,
" http_pass " : mylar . HTTP_PASSWORD ,
2012-10-30 10:43:01 +00:00
" launch_browser " : helpers . checked ( mylar . LAUNCH_BROWSER ) ,
2012-12-20 10:39:37 +00:00
" logverbose " : helpers . checked ( mylar . LOGVERBOSE ) ,
2012-09-13 15:27:34 +00:00
" download_scan_interval " : mylar . DOWNLOAD_SCAN_INTERVAL ,
" nzb_search_interval " : mylar . SEARCH_INTERVAL ,
" libraryscan_interval " : mylar . LIBRARYSCAN_INTERVAL ,
" sab_host " : mylar . SAB_HOST ,
" sab_user " : mylar . SAB_USERNAME ,
" sab_api " : mylar . SAB_APIKEY ,
" sab_pass " : mylar . SAB_PASSWORD ,
" sab_cat " : mylar . SAB_CATEGORY ,
2013-01-13 15:59:46 +00:00
" sab_priority " : mylar . SAB_PRIORITY ,
2012-09-13 15:27:34 +00:00
" use_blackhole " : helpers . checked ( mylar . BLACKHOLE ) ,
" blackhole_dir " : mylar . BLACKHOLE_DIR ,
" usenet_retention " : mylar . USENET_RETENTION ,
" use_nzbsu " : helpers . checked ( mylar . NZBSU ) ,
" nzbsu_api " : mylar . NZBSU_APIKEY ,
" use_dognzb " : helpers . checked ( mylar . DOGNZB ) ,
" dognzb_api " : mylar . DOGNZB_APIKEY ,
2013-01-11 21:20:51 +00:00
" use_nzbx " : helpers . checked ( mylar . NZBX ) ,
2012-09-13 15:27:34 +00:00
" use_experimental " : helpers . checked ( mylar . EXPERIMENTAL ) ,
2012-12-16 17:57:02 +00:00
" use_newznab " : helpers . checked ( mylar . NEWZNAB ) ,
" newznab_host " : mylar . NEWZNAB_HOST ,
" newznab_api " : mylar . NEWZNAB_APIKEY ,
" newznab_enabled " : helpers . checked ( mylar . NEWZNAB_ENABLED ) ,
" extra_newznabs " : mylar . EXTRA_NEWZNABS ,
2012-09-13 15:27:34 +00:00
" destination_dir " : mylar . DESTINATION_DIR ,
2012-09-14 17:29:01 +00:00
" replace_spaces " : helpers . checked ( mylar . REPLACE_SPACES ) ,
" replace_char " : mylar . REPLACE_CHAR ,
2013-01-13 15:59:46 +00:00
" use_minsize " : mylar . USE_MINSIZE ,
" minsize " : mylar . MINSIZE ,
" use_maxsize " : mylar . USE_MAXSIZE ,
" maxsize " : mylar . MAXSIZE ,
2012-09-13 15:27:34 +00:00
" interface_list " : interface_list ,
" autowant_all " : helpers . checked ( mylar . AUTOWANT_ALL ) ,
" autowant_upcoming " : helpers . checked ( mylar . AUTOWANT_UPCOMING ) ,
2012-12-27 15:04:03 +00:00
" comic_cover_local " : helpers . checked ( mylar . COMIC_COVER_LOCAL ) ,
2012-09-13 15:27:34 +00:00
" pref_qual_0 " : helpers . radio ( mylar . PREFERRED_QUALITY , 0 ) ,
" pref_qual_1 " : helpers . radio ( mylar . PREFERRED_QUALITY , 1 ) ,
" pref_qual_3 " : helpers . radio ( mylar . PREFERRED_QUALITY , 3 ) ,
" pref_qual_2 " : helpers . radio ( mylar . PREFERRED_QUALITY , 2 ) ,
" move_files " : helpers . checked ( mylar . MOVE_FILES ) ,
" rename_files " : helpers . checked ( mylar . RENAME_FILES ) ,
" folder_format " : mylar . FOLDER_FORMAT ,
" file_format " : mylar . FILE_FORMAT ,
2012-10-30 10:43:01 +00:00
" zero_level " : helpers . checked ( mylar . ZERO_LEVEL ) ,
" zero_level_n " : mylar . ZERO_LEVEL_N ,
2012-12-27 15:04:03 +00:00
" enable_extra_scripts " : helpers . checked ( mylar . ENABLE_EXTRA_SCRIPTS ) ,
" extra_scripts " : mylar . EXTRA_SCRIPTS ,
2013-01-11 21:20:51 +00:00
" log_dir " : mylar . LOG_DIR ,
" branch " : version . MYLAR_VERSION ,
" br_type " : mylar . INSTALL_TYPE ,
" br_version " : mylar . versioncheck . getVersion ( ) ,
" py_version " : platform . python_version ( ) ,
" data_dir " : mylar . DATA_DIR ,
" prog_dir " : mylar . PROG_DIR ,
" cache_dir " : mylar . CACHE_DIR ,
2013-01-13 15:59:46 +00:00
" config_file " : mylar . CONFIG_FILE
# "branch_history" : br_hist
2012-09-13 15:27:34 +00:00
}
return serve_template ( templatename = " config.html " , title = " Settings " , config = config )
config . exposed = True
2013-01-11 21:20:51 +00:00
def error_change ( self , comicid , errorgcd ) :
if errorgcd [ : 5 ] . isdigit ( ) :
print ( " GCD-ID detected : + str(errorgcd)[:5] " )
print ( " I ' m assuming you know what you ' re doing - going to force-match. " )
self . from_Exceptions ( comicid = comicid , gcdid = errorgcd )
else :
print ( " Assuming rewording of Comic - adjusting to : " + str ( errorgcd ) )
self . addComic ( errorgcd )
error_change . exposed = True
2012-09-13 15:27:34 +00:00
2013-01-13 15:59:46 +00:00
def comic_config ( self , com_location , alt_search , fuzzy_year , ComicID ) :
2012-09-28 15:39:44 +00:00
myDB = db . DBConnection ( )
2013-01-13 15:59:46 +00:00
print ( " fuzzy: " + fuzzy_year )
if fuzzy_year == ' 0 ' : fuzzy_string = " None "
elif fuzzy_year == ' 1 ' : fuzzy_string = " Remove Year "
elif fuzzy_year == ' 2 ' : fuzzy_string = " Fuzzy Year "
# "pref_qual_0" : helpers.radio(mylar.PREFERRED_QUALITY, 0),
# "pref_qual_1" : helpers.radio(mylar.PREFERRED_QUALITY, 1),
# "pref_qual_3" : helpers.radio(mylar.PREFERRED_QUALITY, 3),
# "pref_qual_2" : helpers.radio(mylar.PREFERRED_QUALITY, 2),
2013-01-06 08:51:44 +00:00
#--- this is for multipe search terms............
#--- works, just need to redo search.py to accomodate multiple search terms
# ffs_alt = []
# if '+' in alt_search:
#find first +
# ffs = alt_search.find('+')
# ffs_alt.append(alt_search[:ffs])
# ffs_alt_st = str(ffs_alt[0])
# print("ffs_alt: " + str(ffs_alt[0]))
# split the entire string by the delimter +
# ffs_test = alt_search.split('+')
# if len(ffs_test) > 0:
# print("ffs_test names: " + str(len(ffs_test)))
# ffs_count = len(ffs_test)
# n=1
# while (n < ffs_count):
# ffs_alt.append(ffs_test[n])
# print("adding : " + str(ffs_test[n]))
#print("ffs_alt : " + str(ffs_alt))
# ffs_alt_st = str(ffs_alt_st) + "..." + str(ffs_test[n])
# n+=1
# asearch = ffs_alt
# else:
# asearch = alt_search
asearch = str ( alt_search )
2012-09-28 15:39:44 +00:00
controlValueDict = { ' ComicID ' : ComicID }
2012-12-31 16:52:16 +00:00
newValues = { " ComicLocation " : com_location ,
2013-01-13 15:59:46 +00:00
" AlternateSearch " : str ( asearch ) ,
" UseFuzzy " : fuzzy_year }
2012-09-28 15:39:44 +00:00
#"QUALalt_vers": qual_altvers,
#"QUALScanner": qual_scanner,
#"QUALtype": qual_type,
#"QUALquality": qual_quality
#}
2013-01-06 08:51:44 +00:00
2012-12-20 11:52:21 +00:00
#force the check/creation of directory com_location here
if os . path . isdir ( str ( com_location ) ) :
logger . info ( u " Validating Directory ( " + str ( com_location ) + " ). Already exists! Continuing... " )
else :
logger . fdebug ( " Updated Directory doesn ' t exist! - attempting to create now. " )
try :
os . makedirs ( str ( com_location ) )
logger . info ( u " Directory successfully created at: " + str ( com_location ) )
except OSError :
logger . error ( u " Could not create comicdir : " + str ( com_location ) )
2012-09-28 15:39:44 +00:00
myDB . upsert ( " comics " , newValues , controlValueDict )
2012-09-29 04:56:28 +00:00
raise cherrypy . HTTPRedirect ( " artistPage?ComicID= %s " % ComicID )
2012-09-28 15:39:44 +00:00
comic_config . exposed = True
2012-09-13 15:27:34 +00:00
2012-12-20 10:39:37 +00:00
    def configUpdate(self, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, launch_browser=0, logverbose=0, download_scan_interval=None, nzb_search_interval=None, libraryscan_interval=None,
                     sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, log_dir=None, blackhole=0, blackhole_dir=None,
                     usenet_retention=None, nzbsu=0, nzbsu_apikey=None, dognzb=0, dognzb_apikey=None, nzbx=0, newznab=0, newznab_host=None, newznab_apikey=None, newznab_enabled=0,
                     raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0,
                     preferred_quality=0, move_files=0, rename_files=0, folder_format=None, file_format=None, enable_extra_scripts=0, extra_scripts=None,
                     destination_dir=None, replace_spaces=0, replace_char=None, use_minsize=0, minsize=None, use_maxsize=0, maxsize=None, autowant_all=0, autowant_upcoming=0, comic_cover_local=0, zero_level=0, zero_level_n=None, interface=None, **kwargs):
        """Persist the settings posted from the web config form.

        Copies each submitted value into mylar's module-level globals (mostly
        as-is; only preferred_quality is cast to int here), collects the extra
        numbered newznab providers from **kwargs, writes the config file and
        redirects back to the /config page.
        """
        # -- web server settings
        mylar.HTTP_HOST = http_host
        mylar.HTTP_PORT = http_port
        mylar.HTTP_USERNAME = http_username
        mylar.HTTP_PASSWORD = http_password
        mylar.LAUNCH_BROWSER = launch_browser
        mylar.LOGVERBOSE = logverbose
        # -- scheduler intervals
        mylar.DOWNLOAD_SCAN_INTERVAL = download_scan_interval
        mylar.SEARCH_INTERVAL = nzb_search_interval
        mylar.LIBRARYSCAN_INTERVAL = libraryscan_interval
        # -- SABnzbd connection
        mylar.SAB_HOST = sab_host
        mylar.SAB_USERNAME = sab_username
        mylar.SAB_PASSWORD = sab_password
        mylar.SAB_APIKEY = sab_apikey
        mylar.SAB_CATEGORY = sab_category
        mylar.SAB_PRIORITY = sab_priority
        # -- download providers
        mylar.BLACKHOLE = blackhole
        mylar.BLACKHOLE_DIR = blackhole_dir
        mylar.USENET_RETENTION = usenet_retention
        mylar.NZBSU = nzbsu
        mylar.NZBSU_APIKEY = nzbsu_apikey
        mylar.DOGNZB = dognzb
        mylar.DOGNZB_APIKEY = dognzb_apikey
        mylar.NZBX = nzbx
        mylar.RAW = raw
        mylar.RAW_PROVIDER = raw_provider
        mylar.RAW_USERNAME = raw_username
        mylar.RAW_PASSWORD = raw_password
        mylar.RAW_GROUPS = raw_groups
        mylar.EXPERIMENTAL = experimental
        # -- primary newznab provider (extra numbered ones handled below)
        mylar.NEWZNAB = newznab
        mylar.NEWZNAB_HOST = newznab_host
        mylar.NEWZNAB_APIKEY = newznab_apikey
        mylar.NEWZNAB_ENABLED = newznab_enabled
        # -- post-processing / renaming
        mylar.PREFERRED_QUALITY = int(preferred_quality)
        mylar.MOVE_FILES = move_files
        mylar.RENAME_FILES = rename_files
        mylar.REPLACE_SPACES = replace_spaces
        mylar.REPLACE_CHAR = replace_char
        mylar.ZERO_LEVEL = zero_level
        mylar.ZERO_LEVEL_N = zero_level_n
        # -- result size filtering
        mylar.USE_MINSIZE = use_minsize
        mylar.MINSIZE = minsize
        mylar.USE_MAXSIZE = use_maxsize
        mylar.MAXSIZE = maxsize
        # -- library layout and auto-want defaults
        mylar.FOLDER_FORMAT = folder_format
        mylar.FILE_FORMAT = file_format
        mylar.DESTINATION_DIR = destination_dir
        mylar.AUTOWANT_ALL = autowant_all
        mylar.AUTOWANT_UPCOMING = autowant_upcoming
        mylar.COMIC_COVER_LOCAL = comic_cover_local
        mylar.INTERFACE = interface
        mylar.ENABLE_EXTRA_SCRIPTS = enable_extra_scripts
        mylar.EXTRA_SCRIPTS = extra_scripts
        mylar.LOG_DIR = log_dir

        # Handle the variable config options. Note - keys with False values aren't getting passed
        mylar.EXTRA_NEWZNABS = []

        for kwarg in kwargs:
            if kwarg.startswith('newznab_host'):
                # numbered suffix (e.g. 'newznab_host2' -> '2') ties the
                # host/api/enabled fields of one extra provider together
                newznab_number = kwarg[12:]
                newznab_host = kwargs['newznab_host' + newznab_number]
                newznab_api = kwargs['newznab_api' + newznab_number]
                try:
                    newznab_enabled = int(kwargs['newznab_enabled' + newznab_number])
                except KeyError:
                    # unchecked checkboxes are simply absent from the POST data
                    newznab_enabled = 0
                mylar.EXTRA_NEWZNABS.append((newznab_host, newznab_api, newznab_enabled))

        # Sanity checking
        # NOTE(review): nzb_search_interval arrives from the form as a string
        # (or None), so this numeric comparison may not behave as intended for
        # string values -- confirm and cast to int upstream if needed.
        if mylar.SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum")
            mylar.SEARCH_INTERVAL = 360

        # Write the config
        mylar.config_write()
        raise cherrypy.HTTPRedirect("config")
    configUpdate.exposed = True
def shutdown ( self ) :
mylar . SIGNAL = ' shutdown '
message = ' Shutting Down... '
return serve_template ( templatename = " shutdown.html " , title = " Shutting Down " , message = message , timer = 15 )
return page
shutdown . exposed = True
def restart ( self ) :
mylar . SIGNAL = ' restart '
message = ' Restarting... '
return serve_template ( templatename = " shutdown.html " , title = " Restarting " , message = message , timer = 30 )
restart . exposed = True
def update ( self ) :
mylar . SIGNAL = ' update '
2012-09-17 05:12:40 +00:00
message = ' Updating...<br/><small>Main screen will appear in 60s</small> '
2012-09-13 15:27:34 +00:00
return serve_template ( templatename = " shutdown.html " , title = " Updating " , message = message , timer = 30 )
return page
update . exposed = True
def getInfo ( self , ComicID = None , IssueID = None ) :
from mylar import cache
info_dict = cache . getInfo ( ComicID , IssueID )
return simplejson . dumps ( info_dict )
getInfo . exposed = True
def getComicArtwork ( self , ComicID = None , imageURL = None ) :
from mylar import cache
logger . info ( u " Retrieving image for : " + comicID )
return cache . getArtwork ( ComicID , imageURL )
getComicArtwork . exposed = True