2012-09-14 17:29:01 +00:00
# This file is part of Mylar.
2012-09-13 15:27:34 +00:00
#
2012-09-14 17:29:01 +00:00
# Mylar is free software: you can redistribute it and/or modify
2012-09-13 15:27:34 +00:00
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
2012-09-14 17:29:01 +00:00
# Mylar is distributed in the hope that it will be useful,
2012-09-13 15:27:34 +00:00
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
2012-09-14 17:29:01 +00:00
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
2012-09-13 15:27:34 +00:00
2013-01-15 19:02:32 +00:00
from __future__ import with_statement
2012-09-13 15:27:34 +00:00
import os
import cherrypy
2012-12-31 16:52:16 +00:00
import datetime
2013-01-06 08:51:44 +00:00
import re
2012-09-13 15:27:34 +00:00
from mako . template import Template
from mako . lookup import TemplateLookup
from mako import exceptions
import time
import threading
2013-01-11 21:20:51 +00:00
import csv
import platform
2013-02-06 19:55:23 +00:00
import urllib
import shutil
2012-09-13 15:27:34 +00:00
import mylar
2013-05-25 06:18:00 +00:00
from mylar import logger , db , importer , mb , search , filechecker , helpers , updater , parseit , weeklypull , PostProcessor , version , librarysync , moveit #,rsscheck
2012-09-13 15:27:34 +00:00
#from mylar.helpers import checked, radio, today
import lib . simplejson as simplejson
from operator import itemgetter
def serve_template(templatename, **kwargs):
    """Render a Mako template from the active interface directory.

    Looks up *templatename* under data/interfaces/<mylar.INTERFACE> and
    renders it with **kwargs.  If rendering fails for any reason, Mako's
    HTML error page is returned instead of propagating the exception up
    into CherryPy.
    """
    interface_dir = os.path.join(str(mylar.PROG_DIR), 'data/interfaces/')
    template_dir = os.path.join(str(interface_dir), mylar.INTERFACE)

    _hplookup = TemplateLookup(directories=[template_dir])
    try:
        template = _hplookup.get_template(templatename)
        return template.render(**kwargs)
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; template errors still show the debug page.
        return exceptions.html_error_template().render()
class WebInterface ( object ) :
    def index(self):
        """Root endpoint -- immediately redirect the browser to /home."""
        raise cherrypy.HTTPRedirect("home")
    index.exposed = True
def home ( self ) :
myDB = db . DBConnection ( )
comics = myDB . select ( ' SELECT * from comics order by ComicSortName COLLATE NOCASE ' )
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
if mylar . ANNUALS_ON :
annuals_on = True
else :
annuals_on = False
return serve_template ( templatename = " index.html " , title = " Home " , comics = comics , annuals_on = annuals_on )
2012-09-13 15:27:34 +00:00
home . exposed = True
2013-07-10 01:45:10 +00:00
    def comicDetails(self, ComicID):
        """Display the detail page for a single series.

        Builds Previous/Next navigation from the cached COMICSORT order,
        tallies issue counts per status for the header badges, and
        assembles the per-series config flags used by the template's
        radio buttons / checkboxes.
        """
        myDB = db.DBConnection()
        comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
        if comic is None:
            # unknown series id -- bounce back to the library page
            raise cherrypy.HTTPRedirect("home")
        #let's cheat. :)
        #comicskip = myDB.select('SELECT * from comics order by ComicSortName COLLATE NOCASE')
        skipno = len(mylar.COMICSORT['SortOrder'])
        lastno = mylar.COMICSORT['LastOrderNo']
        lastid = mylar.COMICSORT['LastOrderID']
        series = {}
        if skipno == 0:
            #it's a blank db, let's just null the values and go.
            series['Current'] = None
            series['Previous'] = None
            series['Next'] = None
        # Walk the cached sort order until we find this ComicID, then wire
        # up wrap-around Previous/Next pointers.
        # NOTE(review): if skipno > 0 but ComicID is absent from the cache,
        # `series` stays empty -- template presumably tolerates that; verify.
        i = 0
        while (i < skipno):
            cskip = mylar.COMICSORT['SortOrder'][i]
            if cskip['ComicID'] == ComicID:
                cursortnum = cskip['ComicOrder']
                series['Current'] = cskip['ComicID']
                if cursortnum == 0:
                    # if first record, set the Previous record to the LAST record.
                    previous = lastid
                else:
                    previous = mylar.COMICSORT['SortOrder'][i - 1]['ComicID']
                # if last record, set the Next record to the FIRST record.
                if cursortnum == lastno:
                    next = mylar.COMICSORT['SortOrder'][0]['ComicID']
                else:
                    next = mylar.COMICSORT['SortOrder'][i + 1]['ComicID']
                series['Previous'] = previous
                series['Next'] = next
                break
            i += 1
        issues = myDB.select('SELECT * FROM issues WHERE ComicID=? order by Int_IssueNumber DESC', [ComicID])
        # Tally issues per status; numeric keys are remapped to display
        # names below.
        isCounts = {}
        isCounts[1] = 0   #1 skipped
        isCounts[2] = 0   #2 wanted
        isCounts[3] = 0   #3 archived
        isCounts[4] = 0   #4 downloaded
        isCounts[5] = 0   #5 ignored
        #isCounts[6] = 0  #6 read
        for curResult in issues:
            baseissues = {'skipped': 1, 'wanted': 2, 'archived': 3, 'downloaded': 4, 'ignored': 5}
            for seas in baseissues:
                # substring match against the lowercased Status column
                if seas in curResult['Status'].lower():
                    sconv = baseissues[seas]
                    isCounts[sconv] += 1
                    continue
        isCounts = {
            "Skipped": str(isCounts[1]),
            "Wanted": str(isCounts[2]),
            "Archived": str(isCounts[3]),
            "Downloaded": str(isCounts[4]),
            "Ignored": str(isCounts[5])
        }
        usethefuzzy = comic['UseFuzzy']
        skipped2wanted = "0"
        if usethefuzzy is None: usethefuzzy = "0"
        force_continuing = comic['ForceContinuing']
        if force_continuing is None: force_continuing = 0
        # Flags consumed by the comicdetails.html form controls.
        comicConfig = {
            "comiclocation": mylar.COMIC_LOCATION,
            "fuzzy_year0": helpers.radio(int(usethefuzzy), 0),
            "fuzzy_year1": helpers.radio(int(usethefuzzy), 1),
            "fuzzy_year2": helpers.radio(int(usethefuzzy), 2),
            "skipped2wanted": helpers.checked(skipped2wanted),
            "force_continuing": helpers.checked(force_continuing)
        }
        if mylar.ANNUALS_ON:
            annuals = myDB.select("SELECT * FROM annuals WHERE ComicID=?", [ComicID])
        else: annuals = None
        return serve_template(templatename="comicdetails.html", title=comic['ComicName'], comic=comic, issues=issues, comicConfig=comicConfig, isCounts=isCounts, series=series, annuals=annuals)
    comicDetails.exposed = True
2013-02-25 15:36:43 +00:00
2013-02-27 08:28:40 +00:00
def searchit ( self , name , issue = None , mode = None , type = None ) :
if type is None : type = ' comic ' # let's default this to comic search only for the time being (will add story arc, characters, etc later)
else : print ( str ( type ) + " mode enabled. " )
2012-09-13 15:27:34 +00:00
#mode dictates type of search:
# --series ... search for comicname displaying all results
# --pullseries ... search for comicname displaying a limited # of results based on issue
# --want ... individual comics
if mode is None : mode = ' series '
if len ( name ) == 0 :
raise cherrypy . HTTPRedirect ( " home " )
if type == ' comic ' and mode == ' pullseries ' :
searchresults = mb . findComic ( name , mode , issue = issue )
elif type == ' comic ' and mode == ' series ' :
searchresults = mb . findComic ( name , mode , issue = None )
elif type == ' comic ' and mode == ' want ' :
searchresults = mb . findComic ( name , mode , issue )
2013-02-27 08:28:40 +00:00
elif type == ' storyarc ' :
searchresults = mb . findComic ( name , mode , issue = None , storyarc = ' yes ' )
2013-01-28 20:31:43 +00:00
2013-02-25 15:36:43 +00:00
searchresults = sorted ( searchresults , key = itemgetter ( ' comicyear ' , ' issues ' ) , reverse = True )
2012-09-13 15:27:34 +00:00
#print ("Results: " + str(searchresults))
2013-02-09 03:34:02 +00:00
return serve_template ( templatename = " searchresults.html " , title = ' Search Results for: " ' + name + ' " ' , searchresults = searchresults , type = type , imported = None , ogcname = None )
2012-09-13 15:27:34 +00:00
searchit . exposed = True
2013-02-09 03:34:02 +00:00
    def addComic(self, comicid, comicname=None, comicyear=None, comicimage=None, comicissues=None, comicpublisher=None, imported=None, ogcname=None):
        """Add a series to the watchlist, handling importer confirmations
        and GCD exception/mismatch screens along the way.

        Normal flow ends by spawning importer.addComictoDB in a thread and
        redirecting to the new series' detail page.
        """
        myDB = db.DBConnection()
        if imported == "confirm":
            # if it's coming from the importer and it's just for confirmation, record the right selection and break.
            # if it's 'confirmed' coming in as the value for imported
            # the ogcname will be the original comicid that is either correct/incorrect (doesn't matter which)
            #confirmedid is the selected series (comicid) with the letter C at the beginning to denote Confirmed.
            # then sql the original comicid which will hit on all the results for the given series.
            # iterate through, and overwrite the existing watchmatch with the new chosen 'C' + comicid value
            confirmedid = "C" + str(comicid)
            confirms = myDB.action("SELECT * FROM importresults WHERE WatchMatch=?", [ogcname])
            if confirms is None:
                logger.Error("There are no results that match...this is an ERROR.")
            else:
                for confirm in confirms:
                    controlValue = {"impID": confirm['impID']}
                    newValue = {"WatchMatch": str(confirmedid)}
                    myDB.upsert("importresults", newValue, controlValue)
                self.importResults()
            return
        sresults = []
        cresults = []
        mismatch = "no"
        #print ("comicid: " + str(comicid))
        #print ("comicname: " + str(comicname))
        #print ("comicyear: " + str(comicyear))
        #print ("comicissues: " + str(comicissues))
        #print ("comicimage: " + str(comicimage))
        if not mylar.CV_ONLY:
            #here we test for exception matches (ie. comics spanning more than one volume, known mismatches, etc).
            CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
            if CV_EXcomicid is None: # pass #
                gcdinfo = parseit.GCDScraper(comicname, comicyear, comicissues, comicid, quickmatch="yes")
                if gcdinfo == "No Match":
                    #when it no matches, the image will always be blank...let's fix it.
                    cvdata = mylar.cv.getComic(comicid, 'comic')
                    comicimage = cvdata['ComicImage']
                    updater.no_searchresults(comicid)
                    nomatch = "true"
                    u_comicname = comicname.encode('utf-8').strip()
                    logger.info("I couldn't find an exact match for " + u_comicname + " (" + str(comicyear) + ") - gathering data for Error-Checking screen (this could take a minute)...")
                    i = 0
                    loopie, cnt = parseit.ComChk(comicname, comicyear, comicpublisher, comicissues, comicid)
                    logger.info("total count : " + str(cnt))
                    # collect candidate matches for the Error-Check screen
                    while (i < cnt):
                        try:
                            stoopie = loopie['comchkchoice'][i]
                        except (IndexError, TypeError):
                            break
                        cresults.append({
                            'ComicID': stoopie['ComicID'],
                            'ComicName': stoopie['ComicName'].decode('utf-8', 'replace'),
                            'ComicYear': stoopie['ComicYear'],
                            'ComicIssues': stoopie['ComicIssues'],
                            'ComicURL': stoopie['ComicURL'],
                            'ComicPublisher': stoopie['ComicPublisher'].decode('utf-8', 'replace'),
                            'GCDID': stoopie['GCDID']
                        })
                        i += 1
                    if imported != 'None':
                        #if it's from an import and it has to go through the UEC, return the values
                        #to the calling function and have that return the template
                        return cresults
                    else:
                        return serve_template(templatename="searchfix.html", title="Error Check", comicname=comicname, comicid=comicid, comicyear=comicyear, comicimage=comicimage, comicissues=comicissues, cresults=cresults, imported=None, ogcname=None)
                else:
                    nomatch = "false"
                    logger.info(u"Quick match success..continuing.")
            else:
                if CV_EXcomicid['variloop'] == '99':
                    logger.info(u"mismatched name...autocorrecting to correct GID and auto-adding.")
                    mismatch = "yes"
                if CV_EXcomicid['NewComicID'] == 'none':
                    logger.info(u"multi-volume series detected")
                    testspx = CV_EXcomicid['GComicID'].split('/')
                    # NOTE(review): `exc` is unused; GCDAdd is handed the whole
                    # split list each pass -- looks redundant, confirm intent.
                    for exc in testspx:
                        fakeit = parseit.GCDAdd(testspx)
                        howmany = int(CV_EXcomicid['variloop'])
                        t = 0
                        while (t <= howmany):
                            try:
                                sres = fakeit['serieschoice'][t]
                            except IndexError:
                                break
                            sresults.append({
                                'ComicID': sres['ComicID'],
                                'ComicName': sres['ComicName'],
                                'ComicYear': sres['ComicYear'],
                                'ComicIssues': sres['ComicIssues'],
                                'ComicPublisher': sres['ComicPublisher'],
                                'ComicCover': sres['ComicCover']
                            })
                            t += 1
                    #searchfix(-1).html is for misnamed comics and wrong years.
                    #searchfix-2.html is for comics that span multiple volumes.
                    return serve_template(templatename="searchfix-2.html", title="In-Depth Results", sresults=sresults)
        #print ("imported is: " + str(imported))
        threading.Thread(target=importer.addComictoDB, args=[comicid, mismatch, None, imported, ogcname]).start()
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
    addComic.exposed = True
2012-10-16 08:16:29 +00:00
2013-07-10 01:45:10 +00:00
    def wanted_Export(self):
        """Export every issue marked 'Wanted' to DATA_DIR/wanted_list.csv.

        Writes one header row followed by one row per wanted issue
        (series name/year, issue number/date, ComicID, IssueID), then
        redirects back to the home page.
        """
        import unicodedata
        myDB = db.DBConnection()
        wantlist = myDB.action("SELECT * FROM issues WHERE Status='Wanted' AND ComicName NOT NULL")
        if wantlist is None:
            logger.info("There aren't any issues marked as Wanted. Aborting Export.")
            return
        #write it a wanted_list.csv
        logger.info("gathered data - writing to csv...")
        except_file = os.path.join(mylar.DATA_DIR, "wanted_list.csv")
        if os.path.exists(except_file):
            # start fresh; ignore failures (file may be locked/readonly)
            try:
                os.remove(except_file)
            except (OSError, IOError):
                pass
        wcount = 0
        with open(str(except_file), 'w+') as f:
            # Python 2 str round-trip: decode then re-encode to ascii so
            # non-ascii series names degrade to '?' instead of crashing.
            headrow = "SeriesName,SeriesYear,IssueNumber,IssueDate,ComicID,IssueID"
            headerline = headrow.decode('utf-8', 'ignore')
            f.write('%s\n' % (headerline.encode('ascii', 'replace').strip()))
            for want in wantlist:
                wantcomic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [want['ComicID']]).fetchone()
                exceptln = wantcomic['ComicName'].encode('ascii', 'replace') + "," + str(wantcomic['ComicYear']) + "," + str(want['Issue_Number']) + "," + str(want['IssueDate']) + "," + str(want['ComicID']) + "," + str(want['IssueID'])
                logger.fdebug(exceptln)
                wcount += 1
                f.write('%s\n' % (exceptln.encode('ascii', 'replace').strip()))
        logger.info("Successfully wrote to csv file " + str(wcount) + " entries from your Wanted list.")
        raise cherrypy.HTTPRedirect("home")
    wanted_Export.exposed = True
2013-03-29 04:02:35 +00:00
    def from_Exceptions(self, comicid, gcdid, comicname=None, comicyear=None, comicissues=None, comicpublisher=None, imported=None, ogcname=None):
        """Record a user-confirmed ComicVine<->GCD match and re-add the series.

        Appends a '99,<comicid>,<gcdid>,none' line to custom_exceptions.csv,
        reloads the csv cache, then spawns importer.addComictoDB with
        mismatch forced to "yes".
        """
        import unicodedata
        mismatch = "yes"
        #write it to the custom_exceptions.csv and reload it so that importer will pick it up and do it's thing :)
        #custom_exceptions in this format...
        #99, (comicid), (gcdid), none
        logger.info("saving new information into custom_exceptions.csv...")
        # trailing comment on the csv line identifies the series for humans
        except_info = "none #" + str(comicname) + "-(" + str(comicyear) + ")\n"
        except_file = os.path.join(mylar.DATA_DIR, "custom_exceptions.csv")
        if not os.path.exists(except_file):
            # probe readability only; a missing/unreadable file aborts the op
            try:
                csvfile = open(str(except_file), 'rb')
                csvfile.close()
            except (OSError, IOError):
                logger.error("Could not locate " + str(except_file) + " file. Make sure it's in datadir: " + mylar.DATA_DIR + " with proper permissions.")
                return
        exceptln = "99," + str(comicid) + "," + str(gcdid) + "," + str(except_info)
        exceptline = exceptln.decode('utf-8', 'ignore')
        with open(str(except_file), 'a') as f:
            #f.write('%s,%s,%s,%s\n' % ("99", comicid, gcdid, except_info)
            f.write('%s\n' % (exceptline.encode('ascii', 'replace').strip()))
        logger.info("re-loading csv file so it's all nice and current.")
        mylar.csv_load()
        if imported:
            threading.Thread(target=importer.addComictoDB, args=[comicid, mismatch, None, imported, ogcname]).start()
        else:
            threading.Thread(target=importer.addComictoDB, args=[comicid, mismatch]).start()
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
    from_Exceptions.exposed = True
2012-10-16 08:16:29 +00:00
    def GCDaddComic(self, comicid, comicname=None, comicyear=None, comicissues=None, comiccover=None, comicpublisher=None):
        """Add a GCD-only series (no ComicVine entry) to the watchlist.

        Stores the known metadata under a 'G'-prefixed ComicID, then
        spawns importer.GCDimport to fill in the rest.
        """
        #since we already know most of the info, let's add it to the db so we can reference it later.
        myDB = db.DBConnection()
        gcomicid = "G" + str(comicid)
        # comicyear here is the 'published' string; grab the 4 chars after
        # the first space past position 2 as a first guess at the year.
        comicyear_len = comicyear.find(' ', 2)
        comyear = comicyear[comicyear_len + 1:comicyear_len + 5]
        if comyear.isdigit():
            logger.fdebug("Series year set to : " + str(comyear))
        else:
            logger.fdebug("Invalid Series year detected - trying to adjust from " + str(comyear))
            #comicyear_len above will trap wrong year if it's 10 October 2010 - etc ( 2000 AD)...
            find_comicyear = comicyear.split()
            # last 4-character token wins (loop continues through all tokens)
            for i in find_comicyear:
                if len(i) == 4:
                    logger.fdebug("Series year detected as : " + str(i))
                    comyear = str(i)
                    continue
            logger.fdebug("Series year set to: " + str(comyear))
        controlValueDict = {'ComicID': gcomicid}
        newValueDict = {'ComicName': comicname,
                        'ComicYear': comyear,
                        'ComicPublished': comicyear,
                        'ComicPublisher': comicpublisher,
                        'ComicImage': comiccover,
                        'Total': comicissues}
        myDB.upsert("comics", newValueDict, controlValueDict)
        threading.Thread(target=importer.GCDimport, args=[gcomicid]).start()
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % gcomicid)
    GCDaddComic.exposed = True
2012-10-30 10:43:01 +00:00
def post_process ( self , nzb_name , nzb_folder ) :
logger . info ( u " Starting postprocessing for : " + str ( nzb_name ) )
2012-12-27 15:04:03 +00:00
PostProcess = PostProcessor . PostProcessor ( nzb_name , nzb_folder )
result = PostProcess . Process ( )
2012-10-30 10:43:01 +00:00
#result = post_results.replace("\n","<br />\n")
2013-08-17 17:54:41 +00:00
if nzb_name == ' Manual Run ' or nzb_name == ' Manual+Run ' :
raise cherrypy . HTTPRedirect ( " home " )
else :
return result
2012-10-30 10:43:01 +00:00
#log2screen = threading.Thread(target=PostProcessor.PostProcess, args=[nzb_name,nzb_folder]).start()
#return serve_template(templatename="postprocess.html", title="postprocess")
post_process . exposed = True
2012-09-13 15:27:34 +00:00
def pauseArtist ( self , ComicID ) :
logger . info ( u " Pausing comic: " + ComicID )
myDB = db . DBConnection ( )
controlValueDict = { ' ComicID ' : ComicID }
newValueDict = { ' Status ' : ' Paused ' }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
2013-07-10 01:45:10 +00:00
raise cherrypy . HTTPRedirect ( " comicDetails?ComicID= %s " % ComicID )
2012-09-13 15:27:34 +00:00
pauseArtist . exposed = True
def resumeArtist ( self , ComicID ) :
logger . info ( u " Resuming comic: " + ComicID )
myDB = db . DBConnection ( )
controlValueDict = { ' ComicID ' : ComicID }
newValueDict = { ' Status ' : ' Active ' }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
2013-07-10 01:45:10 +00:00
raise cherrypy . HTTPRedirect ( " comicDetails?ComicID= %s " % ComicID )
2012-09-13 15:27:34 +00:00
resumeArtist . exposed = True
def deleteArtist ( self , ComicID ) :
myDB = db . DBConnection ( )
comic = myDB . action ( ' SELECT * from comics WHERE ComicID=? ' , [ ComicID ] ) . fetchone ( )
2012-09-24 05:17:29 +00:00
if comic [ ' ComicName ' ] is None : ComicName = " None "
else : ComicName = comic [ ' ComicName ' ]
2013-03-08 01:38:05 +00:00
logger . info ( u " Deleting all traces of Comic: " + ComicName )
2012-09-13 15:27:34 +00:00
myDB . action ( ' DELETE from comics WHERE ComicID=? ' , [ ComicID ] )
myDB . action ( ' DELETE from issues WHERE ComicID=? ' , [ ComicID ] )
2013-02-06 19:55:23 +00:00
myDB . action ( ' DELETE from upcoming WHERE ComicID=? ' , [ ComicID ] )
2013-05-06 02:34:01 +00:00
helpers . ComicSort ( sequence = ' update ' )
2012-09-13 15:27:34 +00:00
raise cherrypy . HTTPRedirect ( " home " )
deleteArtist . exposed = True
2013-08-06 00:53:10 +00:00
    def wipenzblog(self, ComicID=None):
        """Drop and recreate the nzblog table (full wipe).

        NOTE(review): the ComicID parameter is accepted but a per-comic
        wipe is not implemented -- when ComicID is supplied nothing
        happens; confirm whether that is intentional.
        """
        logger.fdebug("Wiping NZBLOG in it's entirety. You should NOT be downloading while doing this or else you'll lose the log for the download.")
        myDB = db.DBConnection()
        if ComicID is None:
            myDB.action('DROP table nzblog')
            logger.fdebug("Deleted nzblog table.")
            myDB.action('CREATE TABLE IF NOT EXISTS nzblog (IssueID TEXT, NZBName TEXT, SARC TEXT)')
            logger.fdebug("Re-created nzblog table.")
    wipenzblog.exposed = True
2012-09-13 15:27:34 +00:00
    def refreshArtist(self, ComicID):
        """Refresh a series' metadata from its source.

        GCD-sourced ('G'-prefixed) or non-CV-only series go straight to the
        appropriate importer thread.  In CV-only mode with CV_ONETIMER set,
        the issues are snapshotted, deleted, re-pulled from ComicVine, and
        each issue's previous Status is restored (unless it is now
        Downloaded, or was Snatched).
        """
        myDB = db.DBConnection()
        mismatch = "no"
        if not mylar.CV_ONLY or ComicID[:1] == "G":
            CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [ComicID]).fetchone()
            if CV_EXcomicid is None: pass
            else:
                # variloop == '99' marks a known CV<->GCD mismatch
                if CV_EXcomicid['variloop'] == '99':
                    mismatch = "yes"
            if ComicID[:1] == "G": threading.Thread(target=importer.GCDimport, args=[ComicID]).start()
            else: threading.Thread(target=importer.addComictoDB, args=[ComicID, mismatch]).start()
        else:
            if mylar.CV_ONETIMER == 1:
                logger.fdebug("CV_OneTimer option enabled...")
                #in order to update to JUST CV_ONLY, we need to delete the issues for a given series so it's a clean refresh
                logger.fdebug("Gathering the status of all issues for the series.")
                issues = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
                if mylar.ANNUALS_ON:
                    issues += myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
                #store the issues' status for a given comicid, after deleting and readding, flip the status back to what it was
                logger.fdebug("Deleting all issue data.")
                myDB.select('DELETE FROM issues WHERE ComicID=?', [ComicID])
                myDB.select('DELETE FROM annuals WHERE ComicID=?', [ComicID])
                logger.fdebug("Refreshing the series and pulling in new data using only CV.")
                # synchronous on purpose: statuses are restored right after
                mylar.importer.addComictoDB(ComicID, mismatch, calledfrom='dbupdate')
                issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
                annuals = []
                ann_list = []
                if mylar.ANNUALS_ON:
                    annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
                    ann_list += annuals_list
                    issues_new += annuals_list
                logger.fdebug("Attempting to put the Status' back how they were.")
                icount = 0
                for issue in issues:
                    for issuenew in issues_new:
                        if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
                            #if the status is now Downloaded/Snatched, keep status.
                            if issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
                                break
                            #change the status to the previous status
                            ctrlVAL = {'IssueID': issue['IssueID']}
                            newVAL = {'Status': issue['Status']}
                            # route the restore to the annuals table when the
                            # IssueID came from the annuals snapshot
                            if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):
                                logger.fdebug("annual detected for " + str(issue['IssueID']) + " #: " + str(issue['Issue_Number']))
                                myDB.upsert("Annuals", newVAL, ctrlVAL)
                            else:
                                myDB.upsert("Issues", newVAL, ctrlVAL)
                            icount += 1
                            break
                logger.info("In the process of converting the data to CV, I changed the status of " + str(icount) + " issues.")
            else:
                mylar.importer.addComictoDB(ComicID, mismatch)
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
    refreshArtist.exposed = True
def editIssue(self, ComicID):
    """Render the edit page for a single series identified by ComicID."""
    conn = db.DBConnection()
    comicrow = conn.action('SELECT * from comics WHERE ComicID=?', [ComicID]).fetchone()
    pagetitle = 'Now Editing ' + comicrow['ComicName']
    return serve_template(templatename="editcomic.html", title=pagetitle, comic=comicrow)
editIssue.exposed = True
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def force_rss(self):
    """Kick off an immediate (forced) RSS feed check."""
    logger.info('attempting to run RSS Check Forcibly')
    result = mylar.rsscheck.tehMain(forcerss=True)
    if result:
        logger.info('Successfully ran RSS Force Check.')
    return
force_rss.exposed = True
2013-05-25 06:18:00 +00:00
#def chkTorrents(self, ComicName, pickfeed):
# chktorrent = rsscheck.torrents(ComicName,pickfeed)
# if chktorrent:
# print ("Torrent Check completed.")
# raise cherrypy.HTTPRedirect("home")
#chkTorrents.exposed = True
2012-10-16 08:16:29 +00:00
def markissues(self, action=None, **args):
    """Bulk-change the status of the checked issues on a series page.

    action: one of WantedNew / Wanted / Retry / Downloaded / Archived /
            Skipped / Clear. 'WantedNew' and 'Retry' are stored as 'Wanted'.
    args:   the selected IssueIDs posted by the datatable (plus table
            bookkeeping fields, which are filtered out below).

    Wanted issues are queued for a background search; Archived issues
    trigger a rescan of the series folder.
    """
    myDB = db.DBConnection()
    issuesToAdd = []
    issuestoArchive = []
    comicid = None  # last ComicID processed; used for the rescan + redirect
    if action == 'WantedNew':
        newaction = 'Wanted'
    else:
        newaction = action
    for IssueID in args:
        # ignore the datatable state fields that tag along with the POST
        if IssueID is None or 'issue_table' in IssueID or 'history_table' in IssueID:
            continue
        mi = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
        annchk = 'no'
        if mi is None:
            if mylar.ANNUALS_ON:
                mi = myDB.action("SELECT * FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
                annchk = 'yes'
        if mi is None:
            # not in issues nor annuals (or annuals disabled) - skip instead
            # of crashing on mi['ComicID'] below.
            logger.fdebug("Unable to locate IssueID: " + str(IssueID) + " - skipping.")
            continue
        comicid = mi['ComicID']
        if action == 'Downloaded':
            if mi['Status'] == "Skipped" or mi['Status'] == "Wanted":
                # NOTE(review): original had the 'continue' commented out here,
                # so the status is still upserted below despite this warning -
                # preserved as-is; confirm intended.
                logger.info(u"Cannot change status to %s as comic is not Snatched or Downloaded" % (newaction))
        elif action == 'Archived':
            logger.info(u"Marking %s %s as %s" % (mi['ComicName'], mi['Issue_Number'], newaction))
            issuestoArchive.append(IssueID)
        elif action == 'Wanted' or action == 'Retry':
            if action == 'Retry': newaction = 'Wanted'
            logger.info(u"Marking %s %s as %s" % (mi['ComicName'], mi['Issue_Number'], newaction))
            issuesToAdd.append(IssueID)
        elif action == 'Skipped':
            logger.info(u"Marking " + str(IssueID) + " as Skipped")
        elif action == 'Clear':
            myDB.action("DELETE FROM snatched WHERE IssueID=?", [IssueID])
        controlValueDict = {"IssueID": IssueID}
        newValueDict = {"Status": newaction}
        if annchk == 'yes':
            myDB.upsert("annuals", newValueDict, controlValueDict)
        else:
            myDB.upsert("issues", newValueDict, controlValueDict)
        logger.fdebug("updated...to " + str(newaction))
    if len(issuestoArchive) > 0:
        updater.forceRescan(comicid)
    if len(issuesToAdd) > 0:
        logger.debug("Marking issues: %s as Wanted" % (issuesToAdd))
        threading.Thread(target=search.searchIssueIDList, args=[issuesToAdd]).start()
    if comicid is not None:
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
    # nothing valid was selected - fall back to the home page
    raise cherrypy.HTTPRedirect("home")
markissues.exposed = True
def addArtists(self, **args):
    """Batch-add: spawn a background import of the submitted id list."""
    worker = threading.Thread(target=importer.artistlist_to_mbids, args=[args, True])
    worker.start()
    raise cherrypy.HTTPRedirect("home")
addArtists.exposed = True
2013-07-01 05:19:15 +00:00
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None):
    """Mark an issue as Wanted and kick off a search for it.

    mode dictates the queue type:
      'series' / 'pullseries' - redirect into an interactive search;
      'readlist'              - story-arc / reading-list entry (no ComicID);
      'pullwant'              - one-off grab straight off the weekly pullist;
      'want' / 'want_ann'     - a watchlist issue / annual.
    """
    # replaced leftover debug print statements with logger calls
    logger.fdebug("ComicID: " + str(ComicID))
    logger.fdebug("mode: " + str(mode))
    now = datetime.datetime.now()
    myDB = db.DBConnection()
    #mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
    if ComicID is None and mode == 'series':
        issue = None
        raise cherrypy.HTTPRedirect("searchit?name=%s&issue=%s&mode=%s" % (ComicName, 'None', 'series'))
    elif ComicID is None and mode == 'pullseries':
        # we can limit the search by including the issue # and searching for
        # comics that have X many issues
        raise cherrypy.HTTPRedirect("searchit?name=%s&issue=%s&mode=%s" % (ComicName, 'None', 'pullseries'))
    elif ComicID is None and mode == 'readlist':
        # this is for marking individual comics from a readlist to be downloaded.
        # Because there is no associated ComicID or IssueID, follow same pattern as in 'pullwant'
        # except we know the Year
        if SARC is None:
            # it's just a readlist queue (no storyarc mode enabled)
            SARC = True
            IssueArcID = None
        else:
            logger.info(u"Story Arc : " + str(SARC) + " queueing selected issue...")
            logger.info(u"IssueArcID : " + str(IssueArcID))
        if ComicYear is None: ComicYear = SeriesYear
        logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
        controlValueDict = {"IssueArcID": IssueArcID}
        newStatus = {"Status": "Wanted"}
        myDB.upsert("readinglist", newStatus, controlValueDict)
        foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, IssueDate=None, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID)
        if foundcom == "yes":
            logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")")
        return foundcom
    elif ComicID is None and mode == 'pullwant':
        #this is for marking individual comics from the pullist to be downloaded.
        #because ComicID and IssueID will both be None due to pullist, it's probably
        #better to set both to some generic #, and then filter out later...
        cyear = myDB.action("SELECT SHIPDATE FROM weekly").fetchone()
        ComicYear = str(cyear['SHIPDATE'])[:4]
        # keep ComicYear a string for consistency with the other branches
        if ComicYear == '': ComicYear = str(now.year)
        logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
        foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, IssueDate=cyear['SHIPDATE'], IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None)
        if foundcom == "yes":
            logger.info(u"Downloaded " + ComicName + " " + ComicIssue)
        raise cherrypy.HTTPRedirect("pullist")
    elif mode == 'want' or mode == 'want_ann':
        cdname = myDB.action("SELECT ComicName from comics where ComicID=?", [ComicID]).fetchone()
        ComicName = cdname['ComicName']
        controlValueDict = {"IssueID": IssueID}
        newStatus = {"Status": "Wanted"}
        if mode == 'want':
            logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...")
            myDB.upsert("issues", newStatus, controlValueDict)
        else:
            logger.info(u"Marking " + ComicName + " Annual: " + ComicIssue + " as wanted...")
            myDB.upsert("annuals", newStatus, controlValueDict)
    #for future reference, the year should default to current year (.datetime)
    if mode == 'want':
        issues = myDB.action("SELECT IssueDate FROM issues WHERE IssueID=?", [IssueID]).fetchone()
    elif mode == 'want_ann':
        issues = myDB.action("SELECT IssueDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
    if ComicYear is None:
        ComicYear = str(issues['IssueDate'])[:4]
    miy = myDB.action("SELECT * FROM comics WHERE ComicID=?", [ComicID]).fetchone()
    SeriesYear = miy['ComicYear']
    AlternateSearch = miy['AlternateSearch']
    UseAFuzzy = miy['UseFuzzy']
    ComicVersion = miy['ComicVersion']
    foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, issues['IssueDate'], IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID)
    if foundcom == "yes":
        # file check to see if issue exists and update 'have' count
        if IssueID is not None:
            logger.info("passing to updater.")
            return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov)
    if ComicID:
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
    else:
        raise cherrypy.HTTPRedirect(redirect)
queueissue.exposed = True
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def unqueueissue(self, IssueID, ComicID, ComicName=None, Issue=None, FutureID=None):
    """Revert a Wanted issue back to Skipped.

    With ComicName=None this targets a watchlist issue (or annual).
    Otherwise it is an 'un-want' coming from the Future pull-list, keyed by
    either ComicID (watched series) or FutureID.
    """
    myDB = db.DBConnection()
    if ComicName is None:
        issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
        annchk = 'no'
        if issue is None:
            if mylar.ANNUALS_ON:
                issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
                annchk = 'yes'
        if issue is None:
            # annuals disabled (or unknown id) - nothing to revert; avoid
            # crashing on issue['ComicName'] below.
            logger.error(u"Unable to locate IssueID: " + str(IssueID) + " - cannot mark as skipped.")
            raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
        logger.info(u"Marking " + issue['ComicName'] + " issue #" + issue['Issue_Number'] + " as skipped...")
        controlValueDict = {"IssueID": IssueID}
        newValueDict = {"Status": "Skipped"}
        if annchk == 'yes':
            myDB.upsert("annuals", newValueDict, controlValueDict)
        else:
            myDB.upsert("issues", newValueDict, controlValueDict)
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
    else:
        #if ComicName is not None, then it's from the FuturePull list that we're 'unwanting' an issue.
        #ComicID may be present if it's a watch from the Watchlist, otherwise it won't exist.
        if ComicID is not None and ComicID != 'None':
            logger.info('comicid present: ' + str(ComicID))
            thefuture = myDB.action('SELECT * FROM future WHERE ComicID=?', [ComicID]).fetchone()
        else:
            logger.info('FutureID: ' + str(FutureID))
            logger.info('no comicid - ComicName: ' + str(ComicName) + ' -- Issue: #' + str(Issue))
            thefuture = myDB.action('SELECT * FROM future WHERE FutureID=?', [FutureID]).fetchone()
        if thefuture is None:
            logger.info('Cannot find the corresponding issue in the Futures List for some reason. This is probably an Error.')
        else:
            logger.info('Marking ' + thefuture['COMIC'] + ' issue #' + thefuture['ISSUE'] + ' as skipped...')
            if ComicID is not None and ComicID != 'None':
                cVDict = {"ComicID": thefuture['ComicID']}
            else:
                cVDict = {"FutureID": thefuture['FutureID']}
            nVDict = {"Status": "Skipped"}
            logger.info('cVDict: ' + str(cVDict))
            logger.info('nVDict: ' + str(nVDict))
            myDB.upsert("future", nVDict, cVDict)
unqueueissue.exposed = True
2013-02-17 10:31:18 +00:00
def archiveissue(self, IssueID):
    """Flag a single issue (or annual) as Archived, then return to its series page."""
    myDB = db.DBConnection()
    issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
    annchk = 'no'
    if issue is None:
        if mylar.ANNUALS_ON:
            issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
            annchk = 'yes'
    if issue is None:
        # annuals disabled (or unknown id) - nothing to archive; avoid
        # crashing on issue['ComicName'] below.
        logger.error(u"Unable to locate IssueID: " + str(IssueID) + " - cannot mark as archived.")
        raise cherrypy.HTTPRedirect("home")
    logger.info(u"Marking " + issue['ComicName'] + " issue #" + issue['Issue_Number'] + " as archived...")
    controlValueDict = {'IssueID': IssueID}
    newValueDict = {'Status': 'Archived'}
    if annchk == 'yes':
        myDB.upsert("annuals", newValueDict, controlValueDict)
    else:
        myDB.upsert("issues", newValueDict, controlValueDict)
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % issue['ComicID'])
archiveissue.exposed = True
2012-09-13 15:27:34 +00:00
def pullist(self):
    """Render this week's pull-list page (fetching the list first if absent)."""
    myDB = db.DBConnection()
    weeklyresults = []
    # make sure the weekly table exists before trying to read it
    popit = myDB.select("SELECT * FROM sqlite_master WHERE name='weekly' and type='table'")
    if popit:
        w_results = myDB.select("SELECT PUBLISHER, ISSUE, COMIC, STATUS from weekly")
        for weekly in w_results:
            x = None
            try:
                x = float(weekly['ISSUE'])
            except ValueError:
                # non-numeric issue markers (AU/AI/.INH/.NOW variants) are still valid
                if 'au' in weekly['ISSUE'].lower() or 'ai' in weekly['ISSUE'].lower() or '.inh' in weekly['ISSUE'].lower() or '.now' in weekly['ISSUE'].lower():
                    x = weekly['ISSUE']
            if x is not None:
                weeklyresults.append({
                    "PUBLISHER": weekly['PUBLISHER'],
                    "ISSUE": weekly['ISSUE'],
                    "COMIC": weekly['COMIC'],
                    "STATUS": weekly['STATUS']
                })
        weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False)
        pulldate = myDB.action("SELECT * from weekly").fetchone()
        if pulldate is None:
            # table exists but holds no data yet - go fetch the pull-list
            return self.manualpull()
        weekfold = os.path.join(mylar.DESTINATION_DIR, pulldate['SHIPDATE'])
        return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pulldate=pulldate['SHIPDATE'], pullfilter=True, weekfold=weekfold)
    else:
        # no weekly table at all - build it
        return self.manualpull()
pullist.exposed = True
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def futurepull(self):
    """Trigger a background solicitation fetch for the current month's future releases."""
    from mylar import solicit
    #get month-year here, and self-populate in future
    now = datetime.datetime.now()
    month = str(now.month).zfill(2)  # zero-pad to 2 digits
    year = str(now.year)
    logger.fdebug('month = ' + month)
    logger.fdebug('year = ' + year)
    # pass the callable plus args so the work actually runs on the thread;
    # the original target=solicit.solicit(month, year) called it inline,
    # blocking the request and handing the *result* to Thread.
    threading.Thread(target=solicit.solicit, args=[month, year]).start()
    raise cherrypy.HTTPRedirect("home")
futurepull.exposed = True
def futurepulllist(self):
    """Render the Future (upcoming solicitations) pull-list page."""
    myDB = db.DBConnection()
    futureresults = []
    popit = myDB.select("SELECT * FROM sqlite_master WHERE name='future' and type='table'")
    if popit:
        f_results = myDB.select("SELECT SHIPDATE, PUBLISHER, ISSUE, COMIC, EXTRA, STATUS, ComicID, FutureID from future")
        for future in f_results:
            x = None
            if future['ISSUE'] is None:
                # skip this malformed row instead of aborting the whole list
                # (original used 'break', which dropped all remaining rows)
                continue
            try:
                x = float(future['ISSUE'])
            except ValueError:
                # non-numeric issue markers (AU/AI/.INH/.NOW variants) are still valid
                if 'au' in future['ISSUE'].lower() or 'ai' in future['ISSUE'].lower() or '.inh' in future['ISSUE'].lower() or '.now' in future['ISSUE'].lower():
                    x = future['ISSUE']
            # guard None EXTRA as well so .lower() below cannot crash
            if future['EXTRA'] is None or future['EXTRA'] == 'N/A' or future['EXTRA'] == '':
                future_extra = ''
            else:
                future_extra = future['EXTRA']
                if '(of ' in future['EXTRA'].lower():
                    future_extra = re.sub('[\(\)]', '', future['EXTRA'])
            if x is not None:
                futureresults.append({
                    "SHIPDATE": future['SHIPDATE'],
                    "PUBLISHER": future['PUBLISHER'],
                    "ISSUE": future['ISSUE'],
                    "COMIC": future['COMIC'],
                    "EXTRA": future_extra,
                    "STATUS": future['STATUS'],
                    "COMICID": future['ComicID'],
                    "FUTUREID": future['FutureID']
                })
        futureresults = sorted(futureresults, key=itemgetter('SHIPDATE', 'PUBLISHER', 'COMIC'), reverse=False)
    else:
        logger.error('No results to post for upcoming issues...something is probably wrong')
        return
    return serve_template(templatename="futurepull.html", title="future Pull", futureresults=futureresults, pullfilter=True)
futurepulllist.exposed = True
def add2futurewatchlist(self, ComicName, Issue, Publisher, ShipDate, FutureID):
    """Place a future-solicited issue on the futureupcoming watchlist.

    Also flags the matching row in the 'future' table as Wanted. Does
    nothing if the issue is already on the watchlist.
    """
    logger.info('Adding ' + ComicName + ' #' + str(Issue) + ' to future upcoming watchlist')
    myDB = db.DBConnection()
    already = myDB.action('SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber=?', [ComicName, Issue]).fetchone()
    if already is not None:
        logger.info('Already on Future Upcoming list - not adding at this time.')
        return
    myDB.upsert("futureupcoming",
                {"Status": "Wanted", "IssueDate": ShipDate},
                {"ComicName": ComicName, "IssueNumber": Issue, "Publisher": Publisher})
    myDB.upsert("future",
                {"Status": "Wanted"},
                {"FutureID": FutureID})
add2futurewatchlist.exposed = True
2012-09-13 15:27:34 +00:00
def filterpull(self):
    """Render the weekly pull page with the filter view enabled."""
    dbconn = db.DBConnection()
    results = dbconn.select("SELECT * from weekly")
    shiprow = dbconn.action("SELECT * from weekly").fetchone()
    if shiprow is None:
        raise cherrypy.HTTPRedirect("home")
    return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=results, pulldate=shiprow['SHIPDATE'], pullfilter=True)
filterpull.exposed = True
def manualpull(self):
    """Force an immediate re-fetch/parse of the weekly pull-list in the background."""
    from mylar import weeklypull
    # hand the callable itself to the thread; the original
    # target=weeklypull.pullit() ran the pull synchronously in this request
    # and passed its return value as the thread target.
    threading.Thread(target=weeklypull.pullit).start()
    raise cherrypy.HTTPRedirect("pullist")
manualpull.exposed = True
2013-04-08 16:31:41 +00:00
def pullrecreate(self):
    """Drop and rebuild the weekly pull-list from scratch."""
    from mylar import weeklypull
    myDB = db.DBConnection()
    myDB.action("DROP TABLE weekly")
    mylar.dbcheck()
    logger.info("Deleted existed pull-list data. Recreating Pull-list...")
    # pass the callable + kwargs; the original target=weeklypull.pullit(forcecheck='yes')
    # ran the pull synchronously and handed its result to Thread.
    threading.Thread(target=weeklypull.pullit, kwargs={'forcecheck': 'yes'}).start()
    raise cherrypy.HTTPRedirect("pullist")
pullrecreate.exposed = True
2012-09-13 15:27:34 +00:00
def upcoming(self):
    """Render the Upcoming page.

    Shows future-dated Wanted entries from the upcoming table (both
    month-only and full-date formats), plus everything currently Wanted in
    issues/annuals. Also migrates past-dated upcoming rows into the issues
    table as Wanted and removes them from upcoming.
    """
    myDB = db.DBConnection()
    upcomingdata = myDB.select("SELECT * from upcoming WHERE IssueID is NULL order by IssueDate DESC")
    upcoming = []

    def _wanted_entry(upc):
        # shape one upcoming row for the template (shared by both date branches)
        return {"ComicName": upc['ComicName'],
                "IssueNumber": upc['IssueNumber'],
                "IssueDate": upc['IssueDate'],
                "ComicID": upc['ComicID'],
                "IssueID": upc['IssueID'],
                "Status": upc['Status'],
                "DisplayComicName": upc['DisplayComicName']}

    for upc in upcomingdata:
        if len(upc['IssueDate']) <= 7:
            #if it's less than or equal 7, then it's a future-pull so let's check the date and display
            tmpdatethis = upc['IssueDate']
            if tmpdatethis[:2] == '20':
                tmpdate = tmpdatethis  #already in yyyymm
            else:
                findst = tmpdatethis.find('-')  #find the '-'
                tmpdate = tmpdatethis[findst+1:] + tmpdatethis[:findst]  #rebuild in format of yyyymm
            timenow = datetime.datetime.now().strftime('%Y%m')
        else:
            #if it's greater than 7 it's a full date
            timenow = datetime.datetime.now().strftime('%Y%m%d')  #convert to yyyymmdd
            tmpdate = re.sub("[^0-9]", "", upc['IssueDate'])  #numerics only (should be in yyyymmdd)
        logger.fdebug('comparing pubdate of: ' + str(tmpdate) + ' to now date of: ' + str(timenow))
        if int(tmpdate) >= int(timenow) and upc['Status'] == 'Wanted':
            upcoming.append(_wanted_entry(upc))

    issues = myDB.select("SELECT * from issues WHERE Status='Wanted'")
    ann_list = []
    if mylar.ANNUALS_ON:
        #let's add the annuals to the wanted table so people can see them
        #ComicName wasn't present in db initially - added on startup chk now.
        annuals_list = myDB.select("SELECT * FROM annuals WHERE Status='Wanted'")
        ann_list += annuals_list
        issues += annuals_list

    #let's move any items from the upcoming table into the wanted table if the date has already passed.
    mvupcome = myDB.select("SELECT * from upcoming WHERE IssueDate < date('now') order by IssueDate DESC")
    for mvup in mvupcome:
        myissue = myDB.action("SELECT * FROM issues WHERE IssueID=?", [mvup['IssueID']]).fetchone()
        if myissue is None:
            continue
        logger.fdebug("--Updating Status of issues table because of Upcoming status--")
        logger.fdebug("ComicName: " + str(myissue['ComicName']))
        logger.fdebug("Issue number : " + str(myissue['Issue_Number']))
        mvcontroldict = {"IssueID": myissue['IssueID']}
        mvvalues = {"ComicID": myissue['ComicID'],
                    "Status": "Wanted"}
        myDB.upsert("issues", mvvalues, mvcontroldict)
        #remove old entry from upcoming so it won't try to continually download again.
        myDB.action("DELETE from upcoming WHERE ComicName=? AND IssueNumber=?", [mvup['ComicName'], mvup['IssueNumber']])

    return serve_template(templatename="upcoming.html", title="Upcoming", upcoming=upcoming, issues=issues, ann_list=ann_list)
upcoming.exposed = True
2012-09-24 05:17:29 +00:00
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def skipped2wanted(self, comicid, fromupdate=None):
    """Change every 'Skipped' issue of a series to 'Wanted'.

    If fromupdate is None the issues are searched for immediately in a
    background thread; otherwise they are left for the next scheduled
    search scan / force check.
    """
    issuestowanted = []
    issuesnumwant = []
    myDB = db.DBConnection()
    skipped2 = myDB.select("SELECT * from issues WHERE ComicID=? AND Status='Skipped'", [comicid])
    for skippy in skipped2:
        mvcontroldict = {"IssueID": skippy['IssueID']}
        mvvalues = {"Status": "Wanted"}
        #print ("Changing issue " + str(skippy['Issue_Number']) + " to Wanted.")
        myDB.upsert("issues", mvvalues, mvcontroldict)
        issuestowanted.append(skippy['IssueID'])
        issuesnumwant.append(skippy['Issue_Number'])
    if len(issuestowanted) > 0:
        if fromupdate is None:
            logger.info("Marking issues: %s as Wanted" % issuesnumwant)
            threading.Thread(target=search.searchIssueIDList, args=[issuestowanted]).start()
        else:
            # BUGFIX: original used '&' (bitwise and) instead of '%' for string
            # interpolation, which raised a TypeError whenever fromupdate was set.
            logger.info('Marking issues: %s as Wanted' % issuesnumwant)
            logger.info('These will be searched for on next Search Scan / Force Check')
            return
    # BUGFIX: interpolate the id itself, not a one-element list, so the
    # redirect URL is "ComicID=1234" rather than "ComicID=['1234']".
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
skipped2wanted.exposed = True
2013-07-11 03:37:52 +00:00
def annualDelete(self, comicid):
    """Delete every annual record for the given series, then return to its detail page."""
    myDB = db.DBConnection()
    myDB.action("DELETE FROM annuals WHERE ComicID=?", [comicid])
    logger.fdebug("Deleted all annuals from DB for ComicID of " + str(comicid))
    # BUGFIX: interpolate the id itself, not a one-element list, so the
    # redirect URL is "ComicID=1234" rather than "ComicID=['1234']"
    # (matches the correct form used by forceRescan).
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
annualDelete.exposed = True
2013-02-13 01:27:24 +00:00
def manualRename(self, comicid):
    """Rename every .cbr/.cbz file of a series to match the configured FILE_FORMAT.

    Walks the comic's location directory, matches each archive filename against
    the issue (and, if annuals are enabled, annual) records in the DB, renames
    any file whose name differs from the computed one, and finally forces a
    rescan of the series so the DB reflects the new filenames.
    """
    if mylar.FILE_FORMAT == '':
        # no rename pattern configured - nothing sensible to rename to
        logger.error("You haven't specified a File Format in Configuration/Advanced")
        logger.error("Cannot rename files.")
        return
    myDB = db.DBConnection()
    comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
    comicdir = comic['ComicLocation']
    comicname = comic['ComicName']
    extensions = ('.cbr', '.cbz')
    issues = myDB.action("SELECT * FROM issues WHERE ComicID=?", [comicid]).fetchall()
    if mylar.ANNUALS_ON:
        # include annual records so their files are renamed as well
        issues += myDB.action("SELECT * FROM annuals WHERE ComicID=?", [comicid]).fetchall()
    comfiles = []
    filefind = 0  # count of files actually renamed
    for root, dirnames, filenames in os.walk(comicdir):
        for filename in filenames:
            if filename.lower().endswith(extensions):
                #logger.info("filename being checked is : " + str(filename))
                # only files whose name exactly matches a DB 'Location' are touched
                for issue in issues:
                    if issue['Location'] == filename:
                        #logger.error("matched " + str(filename) + " to DB file " + str(issue['Location']))
                        # annuals get the annualized naming treatment in rename_param
                        if 'annual' in issue['Location'].lower():
                            annualize = 'yes'
                        else:
                            annualize = None
                        renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=None, annualize=annualize)
                        nfilename = renameiss['nfilename']
                        srciss = os.path.join(comicdir, filename)
                        if mylar.LOWERCASE_FILENAMES:
                            dstiss = os.path.join(comicdir, nfilename).lower()
                        else:
                            dstiss = os.path.join(comicdir, nfilename)
                        if filename != nfilename:
                            logger.info("Renaming " + str(filename) + " ... to ... " + str(nfilename))
                            try:
                                shutil.move(srciss, dstiss)
                            except (OSError, IOError):
                                # abort the whole run on the first failed move so a
                                # half-renamed state is visible to the user
                                logger.error("Failed to move files - check directories and manually re-run.")
                                return
                            filefind += 1
                        else:
                            logger.info("Not renaming " + str(filename) + " as it is in desired format already.")
                        #continue
    logger.info("I have renamed " + str(filefind) + " issues of " + comicname)
    # resync DB statuses/locations with the renamed files
    updater.forceRescan(comicid)
manualRename.exposed = True
2013-01-28 20:31:43 +00:00
2012-09-24 05:17:29 +00:00
def searchScan(self, name):
    """Render the search-fix page for the given series name."""
    return serve_template(templatename="searchfix.html", title="Manage", name=name)
searchScan.exposed = True
2012-09-13 15:27:34 +00:00
def manage(self):
    """Render the top-level Manage page."""
    return serve_template(templatename="manage.html", title="Manage")
manage.exposed = True
2012-09-18 13:13:42 +00:00
def manageComics(self):
    """Render the Manage Comics page listing every series, sorted by sort-name."""
    dbconn = db.DBConnection()
    serieslist = dbconn.select('SELECT * from comics order by ComicSortName COLLATE NOCASE')
    return serve_template(templatename="managecomics.html", title="Manage Comics", comics=serieslist)
manageComics.exposed = True
2012-09-13 15:27:34 +00:00
2012-09-18 04:00:43 +00:00
def manageIssues(self):
    """Render the Manage Issues page with every issue row in the DB."""
    dbconn = db.DBConnection()
    allissues = dbconn.select('SELECT * from issues')
    return serve_template(templatename="manageissues.html", title="Manage Issues", issues=allissues)
manageIssues.exposed = True
2012-09-13 15:27:34 +00:00
def manageNew(self):
    """Render the Manage New Artists page from the newartists table."""
    dbconn = db.DBConnection()
    pending = dbconn.select('SELECT * from newartists')
    return serve_template(templatename="managenew.html", title="Manage New Artists", newcomics=pending)
manageNew.exposed = True
2013-05-06 02:34:01 +00:00
2013-05-19 05:21:32 +00:00
def flushImports(self):
    """Wipe the importresults table and return to the Import Results page."""
    dbconn = db.DBConnection()
    dbconn.action('DELETE from importresults')
    logger.info("Flushing all Import Results and clearing the tables")
    raise cherrypy.HTTPRedirect("importResults")
flushImports.exposed = True
2013-05-06 02:34:01 +00:00
def markImports(self, action=None, **args):
    """Handle bulk actions from the Import Results page.

    'massimport' queues the selected series for automated import via
    preSearchit; 'removeimport' drops the entry from importresults.
    """
    dbconn = db.DBConnection()
    queued = []
    for ComicName in args:
        if action == 'massimport':
            logger.info("initiating mass import mode for " + ComicName)
            queued.append(ComicName.decode('utf-8', 'replace'))
        elif action == 'removeimport':
            logger.info("removing " + ComicName + " from the Import list")
            dbconn.action('DELETE from importresults WHERE ComicName=?', [ComicName])
    if len(queued) > 0:
        logger.debug("Mass importing the following series: %s" % queued)
        threading.Thread(target=self.preSearchit, args=[None, queued, len(queued)]).start()
    raise cherrypy.HTTPRedirect("importResults")
markImports.exposed = True
2012-09-13 15:27:34 +00:00
2012-09-18 04:00:43 +00:00
def markComics(self, action=None, **args):
    """Apply a bulk action (delete / pause / resume / refresh) to the selected series.

    Any ComicID whose action is not one of the three explicit verbs is
    collected and refreshed via a background dbUpdate thread.
    """
    myDB = db.DBConnection()
    refreshlist = []
    for ComicID in args:
        if action == 'delete':
            # remove the series and all of its issue records
            myDB.action('DELETE from comics WHERE ComicID=?', [ComicID])
            myDB.action('DELETE from issues WHERE ComicID=?', [ComicID])
        elif action == 'pause':
            myDB.upsert("comics", {'Status': 'Paused'}, {'ComicID': ComicID})
        elif action == 'resume':
            myDB.upsert("comics", {'Status': 'Active'}, {'ComicID': ComicID})
        else:
            refreshlist.append(ComicID)
    if len(refreshlist) > 0:
        logger.fdebug("Refreshing comics: %s" % refreshlist)
        #threading.Thread(target=importer.addComicIDListToDB, args=[refreshlist]).start()
        threading.Thread(target=updater.dbUpdate, args=[refreshlist]).start()
    raise cherrypy.HTTPRedirect("home")
markComics.exposed = True
2012-09-13 15:27:34 +00:00
def forceUpdate(self):
    """Kick off a full database update in a background thread, then go home."""
    from mylar import updater
    threading.Thread(target=updater.dbUpdate).start()
    raise cherrypy.HTTPRedirect("home")
forceUpdate.exposed = True
def forceSearch(self):
    """Kick off an issue search in a background thread, then go home."""
    from mylar import search
    threading.Thread(target=search.searchforissue).start()
    raise cherrypy.HTTPRedirect("home")
forceSearch.exposed = True
def forceRescan(self, ComicID):
    """Rescan one series' files in a background thread, then return to its page."""
    threading.Thread(target=updater.forceRescan, args=[ComicID]).start()
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
forceRescan.exposed = True
def checkGithub(self):
    """Synchronously check GitHub for a newer Mylar version, then go home."""
    from mylar import versioncheck
    versioncheck.checkGithub()
    raise cherrypy.HTTPRedirect("home")
checkGithub.exposed = True
def history(self):
    """Render the History page listing snatched downloads, newest first."""
    myDB = db.DBConnection()
    history = myDB.select('''SELECT * from snatched order by DateAdded DESC''')
    return serve_template(templatename="history.html", title="History", history=history)
    # BUGFIX: removed unreachable 'return page' that followed the return above
    # ('page' was never defined and would have raised NameError).
history.exposed = True
2013-02-25 15:36:43 +00:00
2013-05-01 18:25:46 +00:00
def reOrder(request):
    """Echo the incoming request back unchanged (reorder rendering is disabled)."""
    # return serve_template(templatename="reorder.html", title="ReoRdered!", reorder=request)
    result = request
    return result
reOrder.exposed = True
2013-02-25 15:36:43 +00:00
def readlist(self):
    """Render the Reading List page: story arcs plus individually-added issues."""
    myDB = db.DBConnection()
    readlist = myDB.select("SELECT * from readinglist WHERE ComicName is not Null group by StoryArcID COLLATE NOCASE")
    issuelist = myDB.select("SELECT * from readlist")
    # template flags mirroring the current reading-list configuration
    readConfig = {
        "read2filename": helpers.checked(mylar.READ2FILENAME),
        "storyarcdir": helpers.checked(mylar.STORYARCDIR)
    }
    return serve_template(templatename="readinglist.html", title="Readlist", readlist=readlist, issuelist=issuelist, readConfig=readConfig)
    # BUGFIX: removed unreachable 'return page' that followed the return above
    # ('page' was never defined and would have raised NameError).
readlist.exposed = True
2013-03-02 01:41:45 +00:00
def detailReadlist(self, StoryArcID, StoryArcName):
    """Render the detailed issue list for one story arc, in reading order."""
    dbconn = db.DBConnection()
    arcissues = dbconn.select("SELECT * from readinglist WHERE StoryArcID=? order by ReadingOrder ASC", [StoryArcID])
    readConfig = {"read2filename": helpers.checked(mylar.READ2FILENAME),
                  "storyarcdir": helpers.checked(mylar.STORYARCDIR)}
    return serve_template(templatename="readlist.html", title="Detailed Arc list", readlist=arcissues, storyarcname=StoryArcName, storyarcid=StoryArcID, readConfig=readConfig)
detailReadlist.exposed = True
2013-04-22 03:11:12 +00:00
def removefromreadlist(self, IssueID=None, StoryArcID=None, IssueArcID=None, AllRead=None):
    """Remove reading-list / story-arc entries.

    Exactly one of the keyword arguments is expected: a single readlist issue,
    a whole story arc, a single arc issue, or every readlist row marked Read.
    """
    myDB = db.DBConnection()
    # map whichever identifier was supplied onto one delete + log message
    if IssueID:
        target = ('DELETE from readlist WHERE IssueID=?', [IssueID],
                  "Removed " + str(IssueID) + " from Reading List")
    elif StoryArcID:
        target = ('DELETE from readinglist WHERE StoryArcID=?', [StoryArcID],
                  "Removed " + str(StoryArcID) + " from Story Arcs.")
    elif IssueArcID:
        target = ('DELETE from readinglist WHERE IssueArcID=?', [IssueArcID],
                  "Removed " + str(IssueArcID) + " from the Story Arc.")
    elif AllRead:
        myDB.action("DELETE from readlist WHERE Status='Read'")
        logger.info("Removed All issues that have been marked as Read from Reading List")
        return
    else:
        return
    sql, params, message = target
    myDB.action(sql, params)
    logger.info(message)
removefromreadlist.exposed = True
def markasRead(self, IssueID=None, IssueArcID=None):
    """Toggle an entry's Read status on the readlist (IssueID) or a story arc (IssueArcID).

    A 'Read' entry flips back to 'Added'; anything else becomes 'Read'.
    """
    myDB = db.DBConnection()
    if IssueID:
        entry = myDB.action('SELECT * from readlist WHERE IssueID=?', [IssueID]).fetchone()
        flipped = {"Status": "Added"} if entry['Status'] == 'Read' else {"Status": "Read"}
        myDB.upsert("readlist", flipped, {"IssueID": IssueID})
        logger.info("Marked " + str(entry['ComicName']) + " #" + str(entry['Issue_Number']) + " as Read.")
    elif IssueArcID:
        entry = myDB.action('SELECT * from readinglist WHERE IssueArcID=?', [IssueArcID]).fetchone()
        flipped = {"Status": "Added"} if entry['Status'] == 'Read' else {"Status": "Read"}
        myDB.upsert("readinglist", flipped, {"IssueArcID": IssueArcID})
        logger.info("Marked " + str(entry['ComicName']) + " #" + str(entry['IssueNumber']) + " as Read.")
markasRead.exposed = True
2013-02-25 15:36:43 +00:00
def addtoreadlist(self, IssueID):
    """Add a single issue to the personal reading list (readlist table).

    Looks up the issue and its parent series, upserts a readlist row with
    today's date and status 'added', then redirects back to the series page.
    """
    myDB = db.DBConnection()
    readlist = myDB.action("SELECT * from issues where IssueID=?", [IssueID]).fetchone()
    if readlist is None:
        # BUGFIX: bail out here. Previously the comics lookup below ran first
        # and dereferenced readlist['ComicID'] (TypeError) before this error
        # could be logged, and execution then fell through to a redirect that
        # dereferenced None again.
        logger.error("Cannot locate IssueID - aborting..")
        return
    comicinfo = myDB.action("SELECT * from comics where ComicID=?", [readlist['ComicID']]).fetchone()
    logger.info("attempting to add..issueid " + readlist['IssueID'])
    ctrlval = {"IssueID": IssueID}
    newval = {"DateAdded": helpers.today(),
              "Status": "added",
              "ComicID": readlist['ComicID'],
              "Issue_Number": readlist['Issue_Number'],
              "IssueDate": readlist['IssueDate'],
              "SeriesYear": comicinfo['ComicYear'],
              "ComicName": comicinfo['ComicName']}
    myDB.upsert("readlist", newval, ctrlval)
    logger.info("Added " + str(readlist['ComicName']) + " #" + str(readlist['Issue_Number']) + " to the Reading list.")
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % readlist['ComicID'])
addtoreadlist.exposed = True
2013-03-02 01:41:45 +00:00
def importReadlist(self, filename):
    """Import a ComicRack-style XML reading list (.cbl) into the readinglist table.

    Each <Book> element becomes one row keyed by a randomly generated
    IssueArcID; reading order follows the order of appearance in the file.
    Redirects to the detailed arc view when done.
    """
    from xml.dom.minidom import parseString
    import random
    myDB = db.DBConnection()
    # BUGFIX: use a context manager so the file handle is closed even if
    # reading or parsing raises (original open()/read()/close() leaked on error).
    with open(filename) as f:
        data = f.read()
    dom = parseString(data)
    # of results
    storyarc = dom.getElementsByTagName('Name')[0].firstChild.wholeText
    tracks = dom.getElementsByTagName('Book')
    print("there are " + str(len(tracks)) + " issues in the story-arc: " + str(storyarc))
    #generate a random number for the ID, and tack on the total issue count to the end as a str :)
    storyarcid = str(random.randint(1000, 9999)) + str(len(tracks))
    i = 1  # reading order counter (also removed a duplicate initialisation)
    for book_element in tracks:
        st_issueid = str(storyarcid) + "_" + str(random.randint(1000, 9999))
        comicname = book_element.getAttribute('Series')
        print("comic: " + comicname)
        comicnumber = book_element.getAttribute('Number')
        print("number: " + str(comicnumber))
        comicvolume = book_element.getAttribute('Volume')
        print("volume: " + str(comicvolume))
        comicyear = book_element.getAttribute('Year')
        print("year: " + str(comicyear))
        CtrlVal = {"IssueArcID": st_issueid}
        NewVals = {"StoryArcID": storyarcid,
                   "ComicName": comicname,
                   "IssueNumber": comicnumber,
                   "SeriesYear": comicvolume,
                   "IssueYear": comicyear,
                   "StoryArc": storyarc,
                   "ReadingOrder": i,
                   "TotalIssues": len(tracks)}
        myDB.upsert("readinglist", NewVals, CtrlVal)
        i += 1
    raise cherrypy.HTTPRedirect("detailReadlist?StoryArcID=%s&StoryArcName=%s" % (storyarcid, storyarc))
importReadlist.exposed = True
2013-03-06 16:20:09 +00:00
#Story Arc Ascension...welcome to the next level :)
2013-04-22 03:11:12 +00:00
def ArcWatchlist ( self , StoryArcID = None ) :
2013-03-06 16:20:09 +00:00
myDB = db . DBConnection ( )
2013-04-22 03:11:12 +00:00
if StoryArcID :
ArcWatch = myDB . select ( " SELECT * FROM readinglist WHERE StoryArcID=? " , [ StoryArcID ] )
else :
ArcWatch = myDB . select ( " SELECT * FROM readinglist " )
2013-03-06 16:20:09 +00:00
if ArcWatch is None : logger . info ( " No Story Arcs to search " )
else :
Comics = myDB . select ( " SELECT * FROM comics " )
arc_match = [ ]
2013-04-22 03:11:12 +00:00
wantedlist = [ ]
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
sarc_title = None
2013-04-22 03:11:12 +00:00
showonreadlist = 1 # 0 won't show storyarcissues on readinglist main page, 1 will show
2013-03-06 16:20:09 +00:00
for arc in ArcWatch :
2013-04-22 03:11:12 +00:00
logger . fdebug ( " arc: " + arc [ ' storyarc ' ] + " : " + arc [ ' ComicName ' ] + " : " + arc [ ' IssueNumber ' ] )
2013-03-06 16:20:09 +00:00
#cycle through the story arcs here for matches on the watchlist
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
if sarc_title != arc [ ' storyarc ' ] :
dstloc = os . path . join ( mylar . DESTINATION_DIR , ' StoryArcs ' , arc [ ' storyarc ' ] )
if os . path . isdir ( dstloc ) :
logger . info ( ' Validating Directory ( ' + dstloc + ' ). Already exists! Continuing... ' )
else :
logger . fdebug ( ' Updated Directory doesn not exist! - attempting to create now. ' )
filechecker . validateAndCreateDirectory ( dstloc , True )
2013-03-06 16:20:09 +00:00
mod_arc = re . sub ( ' [ \ :/, \' \ / \ - \ & \ % \ $ \ # \ @ \ ! \ * \ + \ .] ' , ' ' , arc [ ' ComicName ' ] )
2013-04-22 03:11:12 +00:00
mod_arc = re . sub ( ' \\ bthe \\ b ' , ' ' , mod_arc . lower ( ) )
mod_arc = re . sub ( ' \\ band \\ b ' , ' ' , mod_arc . lower ( ) )
2013-03-06 16:20:09 +00:00
mod_arc = re . sub ( r ' \ s ' , ' ' , mod_arc )
2013-04-22 03:11:12 +00:00
matcheroso = " no "
2013-03-06 16:20:09 +00:00
for comic in Comics :
2013-04-22 03:11:12 +00:00
logger . fdebug ( " comic: " + comic [ ' ComicName ' ] )
2013-03-06 16:20:09 +00:00
mod_watch = re . sub ( ' [ \ : \ , \' \ / \ - \ & \ % \ $ \ # \ @ \ ! \ * \ + \ .] ' , ' ' , comic [ ' ComicName ' ] )
2013-04-22 03:11:12 +00:00
mod_watch = re . sub ( ' \\ bthe \\ b ' , ' ' , mod_watch . lower ( ) )
mod_watch = re . sub ( ' \\ band \\ b ' , ' ' , mod_watch . lower ( ) )
2013-03-06 16:20:09 +00:00
mod_watch = re . sub ( r ' \ s ' , ' ' , mod_watch )
2013-04-22 03:11:12 +00:00
if mod_watch == mod_arc : # and arc['SeriesYear'] == comic['ComicYear']:
2013-07-01 05:19:15 +00:00
logger . fdebug ( " initial name match - confirming issue # is present in series " )
2013-04-22 03:11:12 +00:00
if comic [ ' ComicID ' ] [ : 1 ] == ' G ' :
# if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
GCDissue , whocares = helpers . decimal_issue ( arc [ ' IssueNumber ' ] )
GCDissue = int ( GCDissue ) / 1000
2013-04-26 03:45:02 +00:00
if ' . ' not in str ( GCDissue ) : GCDissue = str ( GCDissue ) + " .00 "
2013-04-22 03:11:12 +00:00
logger . fdebug ( " issue converted to " + str ( GCDissue ) )
2013-09-18 04:49:24 +00:00
isschk = myDB . action ( " SELECT * FROM issues WHERE ComicName=? AND Issue_Number=? AND ComicID=? " , [ comic [ ' ComicName ' ] , str ( GCDissue ) , comic [ ' ComicID ' ] ] ) . fetchone ( )
2013-04-22 03:11:12 +00:00
else :
2013-09-18 04:49:24 +00:00
isschk = myDB . action ( " SELECT * FROM issues WHERE ComicName=? AND Issue_Number=? AND ComicID=? " , [ comic [ ' ComicName ' ] , arc [ ' IssueNumber ' ] , comic [ ' ComicID ' ] ] ) . fetchone ( )
2013-04-22 03:11:12 +00:00
if isschk is None :
logger . fdebug ( " we matched on name, but issue " + str ( arc [ ' IssueNumber ' ] ) + " doesn ' t exist for " + comic [ ' ComicName ' ] )
else :
2013-07-01 05:19:15 +00:00
#this gets ugly - if the name matches and the issue, it could still be wrong series
#use series year to break it down further.
2013-09-18 04:49:24 +00:00
logger . fdebug ( ' COMIC-comicyear: ' + str ( int ( comic [ ' ComicYear ' ] ) ) )
logger . fdebug ( ' ARC-seriesyear: ' + str ( int ( arc [ ' SeriesYear ' ] ) ) )
2013-07-01 05:19:15 +00:00
if int ( comic [ ' ComicYear ' ] ) != int ( arc [ ' SeriesYear ' ] ) :
logger . fdebug ( " Series years are different - discarding match. " + str ( comic [ ' ComicYear ' ] ) + " != " + str ( arc [ ' SeriesYear ' ] ) )
2013-09-07 06:05:32 +00:00
else :
logger . fdebug ( " issue #: " + str ( arc [ ' IssueNumber ' ] ) + " is present! " )
2013-09-18 04:49:24 +00:00
logger . fdebug ( ' isschk: ' + str ( isschk ) )
logger . fdebug ( " Comicname: " + arc [ ' ComicName ' ] )
logger . fdebug ( " ComicID: " + str ( isschk [ ' ComicID ' ] ) )
logger . fdebug ( " Issue: " + str ( arc [ ' IssueNumber ' ] ) )
logger . fdebug ( " IssueArcID: " + str ( arc [ ' IssueArcID ' ] ) )
2013-09-07 06:05:32 +00:00
#gather the matches now.
arc_match . append ( {
2013-09-11 18:49:26 +00:00
" match_storyarc " : arc [ ' storyarc ' ] ,
2013-09-07 06:05:32 +00:00
" match_name " : arc [ ' ComicName ' ] ,
" match_id " : isschk [ ' ComicID ' ] ,
" match_issue " : arc [ ' IssueNumber ' ] ,
" match_issuearcid " : arc [ ' IssueArcID ' ] ,
2013-09-11 18:49:26 +00:00
" match_seriesyear " : comic [ ' ComicYear ' ] ,
2013-09-18 04:49:24 +00:00
" match_readingorder " : arc [ ' ReadingOrder ' ] ,
2013-09-11 18:49:26 +00:00
" match_filedirectory " : comic [ ' ComicLocation ' ] } )
2013-09-07 06:05:32 +00:00
matcheroso = " yes "
2013-09-11 18:49:26 +00:00
break
2013-04-22 03:11:12 +00:00
if matcheroso == " no " :
logger . fdebug ( " Unable to find a match for " + arc [ ' ComicName ' ] + " :# " + str ( arc [ ' IssueNumber ' ] ) )
wantedlist . append ( {
" ComicName " : arc [ ' ComicName ' ] ,
" IssueNumber " : arc [ ' IssueNumber ' ] ,
" IssueYear " : arc [ ' IssueYear ' ] } )
2013-09-18 04:49:24 +00:00
dstloc = os . path . join ( mylar . DESTINATION_DIR , ' StoryArcs ' , arc [ ' storyarc ' ] )
logger . fdebug ( ' destination location set to : ' + dstloc )
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
2013-09-18 04:49:24 +00:00
filechk = filechecker . listFiles ( dstloc , arc [ ' ComicName ' ] , sarc = ' true ' )
fn = 0
fccnt = filechk [ ' comiccount ' ]
while ( fn < fccnt ) :
haveissue = " no "
issuedupe = " no "
try :
tmpfc = filechk [ ' comiclist ' ] [ fn ]
except IndexError :
break
temploc = tmpfc [ ' JusttheDigits ' ] . replace ( ' _ ' , ' ' )
fcdigit = helpers . issuedigits ( arc [ ' IssueNumber ' ] )
int_iss = helpers . issuedigits ( temploc )
if int_iss == fcdigit :
logger . fdebug ( arc [ ' ComicName ' ] + ' Issue # ' + arc [ ' IssueNumber ' ] + ' already present in StoryArc directory. ' )
#update readinglist db to reflect status.
if mylar . READ2FILENAME :
readorder = helpers . renamefile_readingorder ( arc [ ' ReadingOrder ' ] )
dfilename = str ( readorder ) + " - " + tmpfc [ ' ComicFilename ' ]
else :
dfilename = tmpfc [ ' ComicFilename ' ]
newVal = { " Status " : " Downloaded " ,
" Location " : dfilename } #tmpfc['ComicFilename']}
ctrlVal = { " IssueArcID " : arc [ ' IssueArcID ' ] }
myDB . upsert ( " readinglist " , newVal , ctrlVal )
fn + = 1
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
sarc_title = arc [ ' storyarc ' ]
2013-04-22 03:11:12 +00:00
logger . fdebug ( " we matched on " + str ( len ( arc_match ) ) + " issues " )
2013-03-06 16:20:09 +00:00
for m_arc in arc_match :
#now we cycle through the issues looking for a match.
2013-04-22 03:11:12 +00:00
issue = myDB . action ( " SELECT * FROM issues where ComicID=? and Issue_Number=? " , [ m_arc [ ' match_id ' ] , m_arc [ ' match_issue ' ] ] ) . fetchone ( )
2013-04-07 18:06:36 +00:00
if issue is None : pass
else :
logger . fdebug ( " issue: " + str ( issue [ ' Issue_Number ' ] ) + " ... " + str ( m_arc [ ' match_issue ' ] ) )
# if helpers.decimal_issue(issuechk['Issue_Number']) == helpers.decimal_issue(m_arc['match_issue']):
if issue [ ' Issue_Number ' ] == m_arc [ ' match_issue ' ] :
logger . fdebug ( " we matched on " + str ( issue [ ' Issue_Number ' ] ) + " for " + str ( m_arc [ ' match_name ' ] ) )
2013-09-07 06:05:32 +00:00
if issue [ ' Status ' ] == ' Downloaded ' or issue [ ' Status ' ] == ' Archived ' or issue [ ' Status ' ] == ' Snatched ' :
2013-04-22 03:11:12 +00:00
ctrlVal = { " IssueArcID " : m_arc [ ' match_issuearcid ' ] }
newVal = { " Status " : issue [ ' Status ' ] ,
" IssueID " : issue [ ' IssueID ' ] }
if showonreadlist :
showctrlVal = { " IssueID " : issue [ ' IssueID ' ] }
shownewVal = { " ComicName " : issue [ ' ComicName ' ] ,
" Issue_Number " : issue [ ' Issue_Number ' ] ,
" IssueDate " : issue [ ' IssueDate ' ] ,
" SeriesYear " : m_arc [ ' match_seriesyear ' ] ,
" ComicID " : m_arc [ ' match_id ' ] }
myDB . upsert ( " readlist " , shownewVal , showctrlVal )
2013-03-12 16:06:44 +00:00
myDB . upsert ( " readinglist " , newVal , ctrlVal )
2013-04-22 03:11:12 +00:00
logger . info ( " Already have " + issue [ ' ComicName ' ] + " :# " + str ( issue [ ' Issue_Number ' ] ) )
2013-09-11 18:49:26 +00:00
if issue [ ' Status ' ] == ' Downloaded ' :
2013-09-18 04:49:24 +00:00
issloc = os . path . join ( m_arc [ ' match_filedirectory ' ] , issue [ ' Location ' ] )
logger . fdebug ( ' source location set to : ' + issloc )
dstloc = os . path . join ( mylar . DESTINATION_DIR , ' StoryArcs ' , m_arc [ ' match_storyarc ' ] )
logger . fdebug ( ' destination location set to : ' + dstloc )
2013-09-11 18:49:26 +00:00
logger . fdebug ( ' attempting to copy into StoryArc directory ' )
#copy into StoryArc directory...
if os . path . isfile ( issloc ) :
if not os . path . isfile ( dstloc ) :
2013-09-18 04:49:24 +00:00
if mylar . READ2FILENAME :
readorder = helpers . renamefile_readingorder ( m_arc [ ' match_readingorder ' ] )
dfilename = str ( readorder ) + " - " + issue [ ' Location ' ]
else :
dfilename = issue [ ' Location ' ]
dstloc = os . path . join ( mylar . DESTINATION_DIR , ' StoryArcs ' , m_arc [ ' match_storyarc ' ] , dfilename )
2013-09-11 18:49:26 +00:00
logger . fdebug ( ' copying ' + issloc + ' to ' + dstloc )
shutil . copy ( issloc , dstloc )
else :
logger . fdebug ( ' destination file exists: ' + dstloc )
else :
logger . fdebug ( ' source file does not exist: ' + issloc )
2013-04-22 03:11:12 +00:00
else :
logger . fdebug ( " We don ' t have " + issue [ ' ComicName ' ] + " :# " + str ( issue [ ' Issue_Number ' ] ) )
ctrlVal = { " IssueArcID " : m_arc [ ' match_issuearcid ' ] }
newVal = { " Status " : " Wanted " ,
" IssueID " : issue [ ' IssueID ' ] }
myDB . upsert ( " readinglist " , newVal , ctrlVal )
2013-07-01 05:19:15 +00:00
logger . info ( " Marked " + issue [ ' ComicName ' ] + " :# " + str ( issue [ ' Issue_Number ' ] ) + " as Wanted. " )
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
2013-04-22 03:11:12 +00:00
2013-03-06 16:20:09 +00:00
ArcWatchlist . exposed = True
2013-04-26 03:45:02 +00:00
def ReadGetWanted(self, StoryArcID):
    # this will queue up (ie. make 'Wanted') issues in a given Story Arc that are 'Not Watched'
    print StoryArcID
    # accumulates status rows to push back into readinglist after searching
    stupdate = []
    mode = 'story_arc'
    myDB = db.DBConnection()
    # pass 1: arc entries with no status at all
    wantedlist = myDB.select("SELECT * FROM readinglist WHERE StoryArcID=? AND Status is Null", [StoryArcID])
    if wantedlist is not None:
        for want in wantedlist:
            print want
            # NOTE(review): looks up the issues table by IssueID using the
            # IssueArcID value -- presumably these share identifiers for
            # watched series; confirm against the schema.
            issuechk = myDB.action("SELECT * FROM issues WHERE IssueID=?", [want['IssueArcID']]).fetchone()
            SARC = want['StoryArc']
            IssueArcID = want['IssueArcID']
            if issuechk is None:
                # none means it's not a 'watched' series
                s_comicid = None
                s_issueid = None
                logger.fdebug("-- NOT a watched series queue.")
                logger.fdebug(want['ComicName'] + " -- #" + str(want['IssueNumber']))
                logger.info(u"Story Arc : " + str(SARC) + " queueing selected issue...")
                logger.info(u"IssueArcID : " + str(IssueArcID))
                # search using only the arc's own metadata (no ComicID/IssueID)
                foundcom, prov = search.search_init(ComicName=want['ComicName'], IssueNumber=want['IssueNumber'], ComicYear=want['IssueYear'], SeriesYear=want['SeriesYear'], IssueDate=None, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
            else:
                # it's a watched series
                s_comicid = issuechk['ComicID']
                s_issueid = issuechk['IssueID']
                logger.fdebug("-- watched series queue.")
                logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number']))
                foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], IssueDate=None, IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID)
            if foundcom == "yes":
                print "sucessfully found."
                #update the status - this is necessary for torrents as they are in 'snatched' status.
                updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
            else:
                print "not sucessfully found."
                stupdate.append({"Status": "Wanted",
                                 "IssueArcID": IssueArcID,
                                 "IssueID": "None"})
    # pass 2: arc entries already flagged 'Wanted' -- retry the search for those
    watchlistchk = myDB.select("SELECT * FROM readinglist WHERE StoryArcID=? AND Status='Wanted'", [StoryArcID])
    if watchlistchk is not None:
        for watchchk in watchlistchk:
            print "Watchlist hit - " + str(watchchk['ComicName'])
            issuechk = myDB.action("SELECT * FROM issues WHERE IssueID=?", [watchchk['IssueArcID']]).fetchone()
            SARC = watchchk['StoryArc']
            IssueArcID = watchchk['IssueArcID']
            if issuechk is None:
                # none means it's not a 'watched' series
                s_comicid = None
                s_issueid = None
                logger.fdebug("-- NOT a watched series queue.")
                logger.fdebug(watchchk['ComicName'] + " -- #" + str(watchchk['IssueNumber']))
                logger.info(u"Story Arc : " + str(SARC) + " queueing selected issue...")
                logger.info(u"IssueArcID : " + str(IssueArcID))
                # NOTE(review): 'IssueYEAR' relies on sqlite3.Row's
                # case-insensitive column lookup -- confirm the column is IssueYear.
                foundcom, prov = search.search_init(ComicName=watchchk['ComicName'], IssueNumber=watchchk['IssueNumber'], ComicYear=watchchk['IssueYEAR'], SeriesYear=watchchk['SeriesYear'], IssueDate=None, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
            else:
                # it's a watched series
                s_comicid = issuechk['ComicID']
                s_issueid = issuechk['IssueID']
                logger.fdebug("-- watched series queue.")
                logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number']))
                foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], IssueDate=None, IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
            if foundcom == "yes":
                print "sucessfully found."
                updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
            else:
                print "Watchlist issue not sucessfully found."
                print "issuearcid: " + str(IssueArcID)
                print "issueid: " + str(s_issueid)
                stupdate.append({"Status": "Wanted",
                                 "IssueArcID": IssueArcID,
                                 "IssueID": s_issueid})
    # flush any not-found entries back to the readinglist as 'Wanted'
    if len(stupdate) > 0:
        print str(len(stupdate)) + " issues need to get updated to Wanted Status"
        for st in stupdate:
            ctrlVal = {'IssueArcID': st['IssueArcID']}
            newVal = {'Status': st['Status']}
            if st['IssueID']:
                print "issueid: " + str(st['IssueID'])
                newVal['IssueID'] = st['IssueID']
            myDB.upsert("readinglist", newVal, ctrlVal)
ReadGetWanted.exposed = True
2013-04-22 03:11:12 +00:00
def ReadMassCopy(self, StoryArcID, StoryArcName):
    #this copies entire story arcs into the /cache/<storyarc> folder
    #alternatively, it will copy the issues individually directly to a 3rd party device (ie.tablet)
    myDB = db.DBConnection()
    copylist = myDB.select("SELECT * FROM readlist WHERE StoryArcID=? AND Status='Downloaded'", [StoryArcID])
    if copylist is None:
        logger.fdebug("You don't have any issues from " + StoryArcName + ". Aborting Mass Copy.")
        return
    else:
        dst = os.path.join(mylar.CACHE, StoryArcName)
        for files in copylist:
            # NOTE(review): this loop only records the source location and never
            # performs the copy -- 'dst' is unused. The implementation appears
            # unfinished; TODO confirm intended behavior before relying on it.
            copyloc = files['Location']
ReadMassCopy.exposed = True
2013-05-25 06:18:00 +00:00
def importLog(self, ComicName):
    """Return the stored import log for *ComicName*, formatted for HTML display."""
    dbase = db.DBConnection()
    entry = dbase.action("SELECT * FROM importresults WHERE ComicName=?", [ComicName]).fetchone()
    if entry is None:
        logger.error(u"No associated log found for this import : " + ComicName)
        return
    # newline -> <br /> so the plain-text log renders with line breaks in the browser
    return entry['implog'].replace("\n", "<br />\n")
importLog.exposed = True
2012-09-13 15:27:34 +00:00
def logs(self):
    """Render the log-viewer page, defaulting the log level to INFO when unset."""
    # an unconfigured level comes through as None or '' -- normalize it first
    if mylar.LOG_LEVEL in (None, ''):
        mylar.LOG_LEVEL = 'INFO'
    return serve_template(templatename="logs.html", title="Log",
                          lineList=mylar.LOG_LIST, log_level=mylar.LOG_LEVEL)
logs.exposed = True
2013-02-06 19:55:23 +00:00
2013-04-22 03:11:12 +00:00
def log_change(self, loglevel):
    """Change the active logging level from the web UI and re-render the log page.

    loglevel -- new level name (e.g. 'DEBUG', 'INFO'); no change when None.
    """
    # FIX: the original body referenced undefined names 'log_level' and
    # 'LOGGER' (NameError on every call). Use the actual parameter and the
    # stdlib root logger (setLevel accepts level names as strings).
    import logging
    if loglevel is not None:
        print ("changing logger to " + str(loglevel))
        logging.getLogger().setLevel(loglevel)
    return serve_template(templatename="logs.html", title="Log",
                          lineList=mylar.LOG_LIST, log_level=loglevel)
log_change.exposed = True
2012-09-13 15:27:34 +00:00
def clearhistory(self, type=None):
    """Delete rows from the snatched history: everything, or only one status."""
    conn = db.DBConnection()
    if type == 'all':
        # wipe the entire table
        logger.info(u"Clearing all history")
        conn.action('DELETE from snatched')
    else:
        # remove only the rows matching the requested status
        logger.info(u"Clearing history where status is %s" % type)
        conn.action('DELETE from snatched WHERE Status=?', [type])
    raise cherrypy.HTTPRedirect("history")
clearhistory.exposed = True
2013-02-06 19:55:23 +00:00
2013-05-04 01:57:19 +00:00
def downloadLocal(self, IssueID=None, IssueArcID=None, ReadOrder=None, dir=None):
    """Copy an issue's file into the cache (or a given directory) and enable its download link.

    IssueID    : issue to copy (looked up in the issues table).
    IssueArcID : optional story-arc entry to update alongside the issue.
    ReadOrder  : reading-order prefix prepended to the filename when
                 mylar.READ2FILENAME is enabled for story arcs.
    dir        : explicit destination directory (used by the weekly pull
                 list); when None the file goes to mylar.CACHE_DIR.

    Redirects to the comic's detail page if the copy fails.
    NOTE: assumes IssueID refers to an existing issue row; ISnewValueDict is
    only set when IssueID or IssueArcID is supplied.
    """
    myDB = db.DBConnection()
    issueDL = myDB.action("SELECT * FROM issues WHERE IssueID=?", [IssueID]).fetchone()
    comicid = issueDL['ComicID']
    comic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
    #---issue info
    comicname = comic['ComicName']
    issuenum = issueDL['Issue_Number']
    issuedate = issueDL['IssueDate']
    seriesyear = comic['ComicYear']
    #---
    issueLOC = comic['ComicLocation']
    issueFILE = issueDL['Location']
    issuePATH = os.path.join(issueLOC, issueFILE)
    # if dir is None, it's a normal copy-to-cache request.
    # if dir is a path, it's coming from the pullist as the location for the weekly comics.
    if dir is not None:
        dstPATH = dir
    else:
        dstPATH = os.path.join(mylar.CACHE_DIR, issueFILE)
    if IssueID:
        ISnewValueDict = {'inCacheDIR': 'True',
                          'Location': issueFILE}
    if IssueArcID:
        # FIX: ARCissueFILE was previously assigned only inside the
        # READ2FILENAME branch, leaving it undefined (NameError) at the
        # readinglist upsert below whenever the option was disabled.
        ARCissueFILE = issueFILE
        if mylar.READ2FILENAME:
            # coming from a StoryArc: prepend the reading order to the filename
            ARCissueFILE = ReadOrder + "-" + issueFILE
            dstPATH = os.path.join(mylar.CACHE_DIR, ARCissueFILE)
        ISnewValueDict = {'inCacheDIR': 'True',
                          'Location': issueFILE}
    try:
        # copy2 preserves file metadata (mtime etc.)
        shutil.copy2(issuePATH, dstPATH)
    except IOError:
        logger.error("Could not copy " + str(issuePATH) + " to " + str(dstPATH) + ". Copy to Cache terminated.")
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
    logger.debug("sucessfully copied to cache...Enabling Download link")
    controlValueDict = {'IssueID': IssueID}
    RLnewValueDict = {'inCacheDIR': 'True',
                      'Location': issueFILE,
                      'ComicID': comicid,
                      'ComicName': comicname,
                      'Issue_Number': issuenum,
                      'SeriesYear': seriesyear,
                      'IssueDate': issuedate}
    myDB.upsert("readlist", RLnewValueDict, controlValueDict)
    myDB.upsert("issues", ISnewValueDict, controlValueDict)
    if IssueArcID:
        controlValueD = {'IssueArcID': IssueArcID}
        newValueDict = {'inCacheDIR': 'True',
                        'Location': ARCissueFILE}
        myDB.upsert("readinglist", newValueDict, controlValueD)
downloadLocal.exposed = True
2013-05-04 01:57:19 +00:00
2013-05-04 04:40:32 +00:00
def MassWeeklyDownload(self, pulldate, weekfolder=0):
    """Copy every issue downloaded from this week's pull list into one folder.

    pulldate   : the week being processed (used as the folder name).
    weekfolder : 1 to create/use a per-week subdir under DESTINATION_DIR,
                 0 to dump into GRABBAG_DIR; the choice is persisted to config.
    """
    mylar.WEEKFOLDER = int(weekfolder)
    mylar.config_write()
    # this will download all downloaded comics from the weekly pull list and throw them
    # into a 'weekly' pull folder for those wanting to transfer directly to a 3rd party device.
    myDB = db.DBConnection()
    if mylar.WEEKFOLDER:
        desdir = os.path.join(mylar.DESTINATION_DIR, pulldate)
        if os.path.isdir(desdir):
            logger.info(u"Directory (" + desdir + ") already exists! Continuing...")
        else:
            logger.info("Directory doesn't exist!")
            try:
                os.makedirs(desdir)
                logger.info(u"Directory successfully created at: " + desdir)
            except OSError:
                # fall back to the main destination dir when creation fails
                logger.error(u"Could not create comicdir : " + desdir)
                logger.error(u"Defaulting to : " + mylar.DESTINATION_DIR)
                desdir = mylar.DESTINATION_DIR
    else:
        desdir = mylar.GRABBAG_DIR
    clist = myDB.select("SELECT * FROM Weekly WHERE Status='Downloaded'")
    if clist is None:  # nothing on the list, just go go gone
        logger.info("There aren't any issues downloaded from this week yet.")
    else:
        iscount = 0
        for cl in clist:
            isslist = myDB.select("SELECT * FROM Issues WHERE ComicID=? AND Status='Downloaded'", [cl['ComicID']])
            if isslist is None: pass  # no issues found for comicid - boo/boo
            else:
                for iss in isslist:
                    #go through issues downloaded until found one we want.
                    if iss['Issue_Number'] == cl['ISSUE']:
                        # reuse the single-issue copier with the weekly folder as target
                        self.downloadLocal(iss['IssueID'], dir=desdir)
                        logger.info("Copied " + iss['ComicName'] + " #" + str(iss['Issue_Number']) + " to " + desdir.encode('utf-8').strip())
                        iscount += 1
                        break
        logger.info("I have copied " + str(iscount) + " issues from this Week's pullist as requested.")
    raise cherrypy.HTTPRedirect("pullist")
MassWeeklyDownload.exposed = True
2012-09-13 15:27:34 +00:00
2013-01-28 20:31:43 +00:00
#for testing.
def idirectory(self):
    """Serve the directory-import landing page."""
    page = serve_template(templatename="idirectory.html", title="Import a Directory")
    return page
idirectory.exposed = True
2013-02-13 01:27:24 +00:00
def confirmResult(self, comicname, comicid):
    """Re-run a series search so the user can confirm an import match."""
    mode = 'series'
    type = 'comic'
    # search by name only; the chosen comicid rides along as ogcname for the template
    sresults = mb.findComic(comicname, mode, None)
    return serve_template(templatename="searchresults.html",
                          title='Import Results for: "' + comicname + '"',
                          searchresults=sresults, type=type,
                          imported='confirm', ogcname=comicid)
confirmResult.exposed = True
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_rename=0, imp_metadata=0):
    """Run a filesystem library scan and store the results for importing.

    path        : directory to scan for comics.
    scan        : when truthy, actually run the scan (otherwise only the
                  settings below are saved to config).
    libraryscan / autoadd / imp_move / imp_rename / imp_metadata :
        import options persisted into the mylar config.
    redirect    : page to return to afterwards (defaults to home).
    """
    # persist the chosen import options before doing anything
    mylar.LIBRARYSCAN = libraryscan
    mylar.ADD_COMICS = autoadd
    mylar.COMIC_DIR = path
    mylar.IMP_MOVE = imp_move
    mylar.IMP_RENAME = imp_rename
    mylar.IMP_METADATA = imp_metadata
    mylar.config_write()
    if scan:
        try:
            soma, noids = librarysync.libraryScan()
        except Exception, e:
            logger.error('Unable to complete the scan: %s' % e)
            return
        if soma == "Completed":
            print ("sucessfully completed import.")
        else:
            logger.info(u"Starting mass importing..." + str(noids) + " records.")
            #this is what it should do...
            #store soma (the list of comic_details from importing) into sql table so import can be whenever
            #display webpage showing results
            #allow user to select comic to add (one at a time)
            #call addComic off of the webpage to initiate the add.
            #return to result page to finish or continue adding.
            #....
            #threading.Thread(target=self.searchit).start()
            #threadthis = threadit.ThreadUrl()
            #result = threadthis.main(soma)
            myDB = db.DBConnection()
            sl = 0
            print ("number of records: " + str(noids))
            # write one importresults row per scanned comic, keyed by impID
            while (sl < int(noids)):
                soma_sl = soma['comic_info'][sl]
                print ("soma_sl: " + str(soma_sl))
                print ("comicname: " + soma_sl['comicname'].encode('utf-8'))
                print ("filename: " + soma_sl['comfilename'].encode('utf-8'))
                controlValue = {"impID": soma_sl['impid']}
                newValue = {"ComicYear": soma_sl['comicyear'],
                            "Status": "Not Imported",
                            "ComicName": soma_sl['comicname'].encode('utf-8'),
                            "ComicFilename": soma_sl['comfilename'].encode('utf-8'),
                            "ComicLocation": soma_sl['comlocation'].encode('utf-8'),
                            "ImportDate": helpers.today(),
                            "WatchMatch": soma_sl['watchmatch']}
                myDB.upsert("importresults", newValue, controlValue)
                sl += 1
            # because we could be adding volumes/series that span years, we need to account for this
            # add the year to the db under the term, valid-years
            # add the issue to the db under the term, min-issue
            #locate metadata here.
            # unzip -z filename.cbz will show the comment field of the zip which contains the metadata.
            # unzip -z filename.cbz < /dev/null will remove the comment field, and thus the metadata.
            #self.importResults()
            raise cherrypy.HTTPRedirect("importResults")
    if redirect:
        raise cherrypy.HTTPRedirect(redirect)
    else:
        raise cherrypy.HTTPRedirect("home")
comicScan.exposed = True
def importResults(self):
    """Build the import-results page, refreshing per-series issue counts first."""
    myDB = db.DBConnection()
    unmatched_sql = "SELECT * FROM importresults WHERE WatchMatch is Null OR WatchMatch LIKE 'C%' group by ComicName COLLATE NOCASE"
    #this is to get the count of issues;
    for row in myDB.select(unmatched_sql):
        countthis = myDB.action("SELECT count(*) FROM importresults WHERE ComicName=?", [row['ComicName']]).fetchall()
        myDB.upsert("importresults",
                    {"IssueCount": countthis[0][0]},
                    {"ComicName": row['ComicName']})
    #need to reload results now
    results = myDB.select(unmatched_sql)
    watchresults = myDB.select("SELECT * FROM importresults WHERE WatchMatch is not Null AND WatchMatch NOT LIKE 'C%' group by ComicName COLLATE NOCASE")
    return serve_template(templatename="importresults.html", title="Import Results",
                          results=results, watchresults=watchresults)
importResults.exposed = True
2013-02-06 19:55:23 +00:00
2013-02-09 03:34:02 +00:00
def deleteimport(self, ComicName):
    """Drop all import-result rows recorded for the given comic name."""
    logger.info("Removing import data for Comic: " + ComicName)
    conn = db.DBConnection()
    conn.action('DELETE from importresults WHERE ComicName=?', [ComicName])
    raise cherrypy.HTTPRedirect("importResults")
deleteimport.exposed = True
2013-05-06 02:34:01 +00:00
def preSearchit ( self , ComicName , comiclist = None , mimp = 0 ) :
2013-05-25 06:18:00 +00:00
implog = ' '
2013-07-30 04:57:37 +00:00
implog = implog + " imp_rename: " + str ( mylar . IMP_RENAME ) + " \n "
implog = implog + " imp_move: " + str ( mylar . IMP_MOVE ) + " \n "
2013-05-06 02:34:01 +00:00
if mimp == 0 :
comiclist = [ ]
comiclist . append ( ComicName )
for cl in comiclist :
ComicName = cl
2013-05-25 06:18:00 +00:00
implog = implog + " comicName: " + str ( ComicName ) + " \n "
2013-05-06 02:34:01 +00:00
myDB = db . DBConnection ( )
results = myDB . action ( " SELECT * FROM importresults WHERE ComicName=? " , [ ComicName ] )
#if results > 0:
# print ("There are " + str(results[7]) + " issues to import of " + str(ComicName))
#build the valid year ranges and the minimum issue# here to pass to search.
yearRANGE = [ ]
yearTOP = 0
minISSUE = 0
startISSUE = 10000000
comicstoIMP = [ ]
movealreadyonlist = " no "
movedata = [ ]
for result in results :
if result is None :
break
if result [ ' WatchMatch ' ] :
watchmatched = result [ ' WatchMatch ' ]
2013-02-13 01:27:24 +00:00
else :
2013-05-06 02:34:01 +00:00
watchmatched = ' '
if watchmatched . startswith ( ' C ' ) :
2013-05-25 06:18:00 +00:00
implog = implog + " Confirmed. ComicID already provided - initiating auto-magik mode for import. \n "
2013-05-06 02:34:01 +00:00
comicid = result [ ' WatchMatch ' ] [ 1 : ]
2013-05-25 06:18:00 +00:00
implog = implog + result [ ' WatchMatch ' ] + " .to. " + str ( comicid ) + " \n "
2013-05-06 02:34:01 +00:00
#since it's already in the watchlist, we just need to move the files and re-run the filechecker.
#self.refreshArtist(comicid=comicid,imported='yes')
if mylar . IMP_MOVE :
2013-05-25 06:18:00 +00:00
implog = implog + " Mass import - Move files \n "
2013-05-06 02:34:01 +00:00
comloc = myDB . action ( " SELECT * FROM comics WHERE ComicID=? " , [ comicid ] ) . fetchone ( )
movedata_comicid = comicid
movedata_comiclocation = comloc [ ' ComicLocation ' ]
movedata_comicname = ComicName
movealreadyonlist = " yes "
#mylar.moveit.movefiles(comicid,comloc['ComicLocation'],ComicName)
#check for existing files... (this is already called after move files in importer)
#updater.forceRescan(comicid)
else :
2013-05-25 06:18:00 +00:00
implog = implog + " nothing to do if I ' m not moving. \n "
2013-05-06 02:34:01 +00:00
raise cherrypy . HTTPRedirect ( " importResults " )
else :
comicstoIMP . append ( result [ ' ComicLocation ' ] . decode ( mylar . SYS_ENCODING , ' replace ' ) )
getiss = result [ ' impID ' ] . rfind ( ' - ' )
getiss = result [ ' impID ' ] [ getiss + 1 : ]
2013-05-25 06:18:00 +00:00
imlog = implog + " figured issue is : " + str ( getiss ) + " \n "
2013-05-06 02:34:01 +00:00
if ( result [ ' ComicYear ' ] not in yearRANGE ) or ( yearRANGE is None ) :
if result [ ' ComicYear ' ] < > " 0000 " :
2013-05-25 06:18:00 +00:00
implog = implog + " adding... " + str ( result [ ' ComicYear ' ] ) + " \n "
2013-05-06 02:34:01 +00:00
yearRANGE . append ( result [ ' ComicYear ' ] )
yearTOP = str ( result [ ' ComicYear ' ] )
if int ( getiss ) > int ( minISSUE ) :
2013-05-25 06:18:00 +00:00
implog = implog + " issue now set to : " + str ( getiss ) + " ... it was : " + str ( minISSUE ) + " \n "
2013-05-06 02:34:01 +00:00
minISSUE = str ( getiss )
if int ( getiss ) < int ( startISSUE ) :
2013-05-25 06:18:00 +00:00
implog = implog + " issue now set to : " + str ( getiss ) + " ... it was : " + str ( startISSUE ) + " \n "
2013-05-06 02:34:01 +00:00
startISSUE = str ( getiss )
2013-04-26 03:45:02 +00:00
2013-05-06 02:34:01 +00:00
#taking this outside of the transaction in an attempt to stop db locking.
if mylar . IMP_MOVE and movealreadyonlist == " yes " :
# for md in movedata:
mylar . moveit . movefiles ( movedata_comicid , movedata_comiclocation , movedata_comicname )
updater . forceRescan ( comicid )
raise cherrypy . HTTPRedirect ( " importResults " )
#figure out # of issues and the year range allowable
if yearTOP > 0 :
maxyear = int ( yearTOP ) - ( int ( minISSUE ) / 12 )
yearRANGE . append ( str ( maxyear ) )
2013-05-25 06:18:00 +00:00
implog = implog + " there is a " + str ( maxyear ) + " year variation based on the 12 issues/year \n "
2013-05-06 02:34:01 +00:00
else :
2013-05-25 06:18:00 +00:00
implog = implog + " no year detected in any issues...Nulling the value \n "
2013-05-06 02:34:01 +00:00
yearRANGE = None
#determine a best-guess to # of issues in series
#this needs to be reworked / refined ALOT more.
#minISSUE = highest issue #, startISSUE = lowest issue #
numissues = int ( minISSUE ) - int ( startISSUE )
#normally minissue would work if the issue #'s started at #1.
2013-05-25 06:18:00 +00:00
implog = implog + " the years involved are : " + str ( yearRANGE ) + " \n "
implog = implog + " highest issue # is : " + str ( minISSUE ) + " \n "
implog = implog + " lowest issue # is : " + str ( startISSUE ) + " \n "
implog = implog + " approximate number of issues : " + str ( numissues ) + " \n "
implog = implog + " issues present on system : " + str ( len ( comicstoIMP ) ) + " \n "
implog = implog + " versioning checking on filenames: \n "
2013-05-06 02:34:01 +00:00
cnsplit = ComicName . split ( )
#cnwords = len(cnsplit)
#cnvers = cnsplit[cnwords-1]
ogcname = ComicName
for splitt in cnsplit :
print ( " split " )
if ' v ' in str ( splitt ) :
2013-05-25 06:18:00 +00:00
implog = implog + " possible versioning detected. \n "
2013-05-06 02:34:01 +00:00
if splitt [ 1 : ] . isdigit ( ) :
2013-05-25 06:18:00 +00:00
implog = implog + splitt + " - assuming versioning. Removing from initial search pattern. \n "
2013-05-06 02:34:01 +00:00
ComicName = re . sub ( str ( splitt ) , ' ' , ComicName )
2013-05-25 06:18:00 +00:00
implog = implog + " new comicname is : " + ComicName + " \n "
2013-05-06 02:34:01 +00:00
# we need to pass the original comicname here into the entire importer module
# so that we can reference the correct issues later.
2013-02-09 03:34:02 +00:00
2013-05-06 02:34:01 +00:00
mode = ' series '
if yearRANGE is None :
sresults = mb . findComic ( ComicName , mode , issue = numissues )
else :
sresults = mb . findComic ( ComicName , mode , issue = numissues , limityear = yearRANGE )
type = ' comic '
2013-02-09 03:34:02 +00:00
if len ( sresults ) == 1 :
2013-02-25 15:36:43 +00:00
sr = sresults [ 0 ]
2013-05-25 06:18:00 +00:00
implog = implog + " only one result...automagik-mode enabled for " + ComicName + " :: " + str ( sr [ ' comicid ' ] ) + " \n "
2013-02-09 03:34:02 +00:00
resultset = 1
2013-05-06 02:34:01 +00:00
# #need to move the files here.
elif len ( sresults ) == 0 or len ( sresults ) is None :
2013-05-25 06:18:00 +00:00
implog = implog + " no results, removing the year from the agenda and re-querying. \n "
2013-05-06 02:34:01 +00:00
sresults = mb . findComic ( ComicName , mode , issue = numissues )
if len ( sresults ) == 1 :
sr = sresults [ 0 ]
2013-05-25 06:18:00 +00:00
implog = implog + " only one result...automagik-mode enabled for " + ComicName + " :: " + str ( sr [ ' comicid ' ] ) + " \n "
2013-05-06 02:34:01 +00:00
resultset = 1
else :
resultset = 0
else :
2013-05-25 06:18:00 +00:00
implog = implog + " returning results to screen - more than one possibility. \n "
2013-02-09 03:34:02 +00:00
resultset = 0
2013-05-25 06:18:00 +00:00
#write implog to db here.
print " Writing import log to db for viewing pleasure. "
ctrlVal = { " ComicName " : ComicName }
newVal = { " implog " : implog }
myDB . upsert ( " importresults " , newVal , ctrlVal )
2013-05-06 02:34:01 +00:00
if resultset == 1 :
2013-05-25 06:18:00 +00:00
#implog = implog + "ogcname -- " + str(ogcname) + "\n"
2013-05-06 02:34:01 +00:00
cresults = self . addComic ( comicid = sr [ ' comicid ' ] , comicname = sr [ ' name ' ] , comicyear = sr [ ' comicyear ' ] , comicpublisher = sr [ ' publisher ' ] , comicimage = sr [ ' comicimage ' ] , comicissues = sr [ ' issues ' ] , imported = ' yes ' , ogcname = ogcname ) #imported=comicstoIMP,ogcname=ogcname)
return serve_template ( templatename = " searchfix.html " , title = " Error Check " , comicname = sr [ ' name ' ] , comicid = sr [ ' comicid ' ] , comicyear = sr [ ' comicyear ' ] , comicimage = sr [ ' comicimage ' ] , comicissues = sr [ ' issues ' ] , cresults = cresults , imported = ' yes ' , ogcname = str ( ogcname ) )
else :
return serve_template ( templatename = " searchresults.html " , title = ' Import Results for: " ' + ComicName + ' " ' , searchresults = sresults , type = type , imported = ' yes ' , ogcname = ogcname ) #imported=comicstoIMP, ogcname=ogcname)
2013-02-06 19:55:23 +00:00
preSearchit . exposed = True
2014-01-16 20:25:02 +00:00
def pretty_git(self, br_history):
    """Break a git history log down into tagged, per-change entries.

    br_history is the multi-line output of
    ``git log --pretty=format:'%h - %ar - %s'``.  Each commit subject is
    scanned for FIX:/IMP: markers and one dict per marked segment is
    collected as {"commit", "time", "stat", "line"}.

    Returns the list of collected entries.  (The original implementation
    only printed them and implicitly returned None; callers that ignore
    the return value are unaffected.)
    """
    commits = []
    for br in br_history.split("\n"):   # one commit per line
        if not br.strip():
            continue
        # "%h - %ar - %s" -> [hash, relative time, subject]
        parts = br.split(' - ', 2)
        br_commit = parts[0].strip()
        br_time = parts[1].strip() if len(parts) > 1 else ''
        tag_status = None   # status of the segment currently being collected
        statline = ''
        for cs in br.split():
            if tag_status is not None:
                if 'FIX:' in cs or 'IMP:' in cs:
                    # A new tag token ends the current segment: flush it and
                    # start collecting the new one.  (The original compared
                    # `tag_chk == False` instead of assigning, so it never
                    # reset, and it also swallowed the follow-on tag token.)
                    commits.append({"commit": br_commit,
                                    "time": br_time,
                                    "stat": tag_status,
                                    "line": statline})
                    tag_status = 'FIX' if 'FIX:' in cs else 'IMPROVEMENT'
                    statline = ''
                else:
                    statline += str(cs) + ' '
            else:
                if 'FIX:' in cs:
                    tag_status = 'FIX'
                elif 'IMP:' in cs:
                    tag_status = 'IMPROVEMENT'
        if tag_status is not None:
            # Flush the trailing segment of the line (the original dropped it).
            commits.append({"commit": br_commit,
                            "time": br_time,
                            "stat": tag_status,
                            "line": statline})
    return commits
pretty_git.exposed = True
2013-01-28 20:31:43 +00:00
#---
2012-09-13 15:27:34 +00:00
def config(self):
    """Render the Settings page.

    Gathers library statistics and a snapshot of every configuration value
    from the mylar module globals, then hands both to the config.html
    template.  Checkbox/radio values are pre-formatted for the template via
    helpers.checked() / helpers.radio().
    """
    interface_dir = os.path.join(mylar.PROG_DIR, 'data/interfaces/')
    interface_list = [name for name in os.listdir(interface_dir) if os.path.isdir(os.path.join(interface_dir, name))]

    #----
    # to be implemented in the future.
    # branch_history, err = mylar.versioncheck.runGit("log --oneline --pretty=format:'%h - %ar - %s' -n 4")
    # #here we pass the branch_history to the pretty_git module to break it down
    # if branch_history:
    #     self.pretty_git(branch_history)
    #     br_hist = branch_history.replace("\n", "<br />\n")
    # else:
    #     br_hist = err
    #----

    # Library statistics shown in the page header.
    myDB = db.DBConnection()
    CCOMICS = myDB.action("SELECT COUNT(*) FROM comics").fetchall()
    CHAVES = myDB.action("SELECT COUNT(*) FROM issues WHERE Status='Downloaded' OR Status='Archived'").fetchall()
    CISSUES = myDB.action("SELECT COUNT(*) FROM issues").fetchall()
    CSIZE = myDB.action("select SUM(ComicSize) from issues where Status='Downloaded' or Status='Archived'").fetchall()

    COUNT_COMICS = CCOMICS[0][0]
    COUNT_HAVES = CHAVES[0][0]
    COUNT_ISSUES = CISSUES[0][0]
    # NOTE(review): SUM() yields NULL (None) when no issues are downloaded -
    # presumably helpers.human_size tolerates None; confirm.
    COUNT_SIZE = helpers.human_size(CSIZE[0][0])

    comicinfo = {"COUNT_COMICS": COUNT_COMICS,
                 "COUNT_HAVES": COUNT_HAVES,
                 "COUNT_ISSUES": COUNT_ISSUES,
                 "COUNT_SIZE": COUNT_SIZE}

    # One flat mapping of template variable -> current configuration value.
    config = {
        "http_host": mylar.HTTP_HOST,
        "http_user": mylar.HTTP_USERNAME,
        "http_port": mylar.HTTP_PORT,
        "http_pass": mylar.HTTP_PASSWORD,
        "launch_browser": helpers.checked(mylar.LAUNCH_BROWSER),
        "logverbose": helpers.checked(mylar.LOGVERBOSE),
        "download_scan_interval": mylar.DOWNLOAD_SCAN_INTERVAL,
        "nzb_search_interval": mylar.SEARCH_INTERVAL,
        "nzb_startup_search": helpers.checked(mylar.NZB_STARTUP_SEARCH),
        "libraryscan_interval": mylar.LIBRARYSCAN_INTERVAL,
        "search_delay": mylar.SEARCH_DELAY,
        # download clients
        "use_sabnzbd": helpers.checked(mylar.USE_SABNZBD),
        "sab_host": mylar.SAB_HOST,
        "sab_user": mylar.SAB_USERNAME,
        "sab_api": mylar.SAB_APIKEY,
        "sab_pass": mylar.SAB_PASSWORD,
        "sab_cat": mylar.SAB_CATEGORY,
        "sab_priority": mylar.SAB_PRIORITY,
        "sab_directory": mylar.SAB_DIRECTORY,
        "use_nzbget": helpers.checked(mylar.USE_NZBGET),
        "nzbget_host": mylar.NZBGET_HOST,
        "nzbget_port": mylar.NZBGET_PORT,
        "nzbget_user": mylar.NZBGET_USERNAME,
        "nzbget_pass": mylar.NZBGET_PASSWORD,
        "nzbget_cat": mylar.NZBGET_CATEGORY,
        "nzbget_priority": mylar.NZBGET_PRIORITY,
        "use_blackhole": helpers.checked(mylar.BLACKHOLE),
        "blackhole_dir": mylar.BLACKHOLE_DIR,
        "usenet_retention": mylar.USENET_RETENTION,
        # nzb providers
        "use_nzbsu": helpers.checked(mylar.NZBSU),
        "nzbsu_uid": mylar.NZBSU_UID,
        "nzbsu_api": mylar.NZBSU_APIKEY,
        "use_dognzb": helpers.checked(mylar.DOGNZB),
        "dognzb_uid": mylar.DOGNZB_UID,
        "dognzb_api": mylar.DOGNZB_APIKEY,
        "use_nzbx": helpers.checked(mylar.NZBX),
        "use_experimental": helpers.checked(mylar.EXPERIMENTAL),
        "use_newznab": helpers.checked(mylar.NEWZNAB),
        "newznab_host": mylar.NEWZNAB_HOST,
        "newznab_name": mylar.NEWZNAB_NAME,
        "newznab_api": mylar.NEWZNAB_APIKEY,
        "newznab_uid": mylar.NEWZNAB_UID,
        "newznab_enabled": helpers.checked(mylar.NEWZNAB_ENABLED),
        "extra_newznabs": mylar.EXTRA_NEWZNABS,
        # rss / torrents
        "enable_rss": helpers.checked(mylar.ENABLE_RSS),
        "rss_checkinterval": mylar.RSS_CHECKINTERVAL,
        "provider_order": mylar.PROVIDER_ORDER,
        "enable_torrents": helpers.checked(mylar.ENABLE_TORRENTS),
        "torrent_local": helpers.checked(mylar.TORRENT_LOCAL),
        "local_watchdir": mylar.LOCAL_WATCHDIR,
        "torrent_seedbox": helpers.checked(mylar.TORRENT_SEEDBOX),
        "seedbox_watchdir": mylar.SEEDBOX_WATCHDIR,
        "seedbox_host": mylar.SEEDBOX_HOST,
        "seedbox_port": mylar.SEEDBOX_PORT,
        "seedbox_user": mylar.SEEDBOX_USER,
        "seedbox_pass": mylar.SEEDBOX_PASS,
        "enable_torrent_search": helpers.checked(mylar.ENABLE_TORRENT_SEARCH),
        "enable_kat": helpers.checked(mylar.ENABLE_KAT),
        "enable_cbt": helpers.checked(mylar.ENABLE_CBT),
        "cbt_passkey": mylar.CBT_PASSKEY,
        # post-processing / file handling
        "destination_dir": mylar.DESTINATION_DIR,
        "chmod_dir": mylar.CHMOD_DIR,
        "chmod_file": mylar.CHMOD_FILE,
        "replace_spaces": helpers.checked(mylar.REPLACE_SPACES),
        "replace_char": mylar.REPLACE_CHAR,
        "use_minsize": helpers.checked(mylar.USE_MINSIZE),
        "minsize": mylar.MINSIZE,
        "use_maxsize": helpers.checked(mylar.USE_MAXSIZE),
        "maxsize": mylar.MAXSIZE,
        "interface_list": interface_list,
        "autowant_all": helpers.checked(mylar.AUTOWANT_ALL),
        "autowant_upcoming": helpers.checked(mylar.AUTOWANT_UPCOMING),
        "comic_cover_local": helpers.checked(mylar.COMIC_COVER_LOCAL),
        "pref_qual_0": helpers.radio(int(mylar.PREFERRED_QUALITY), 0),
        "pref_qual_1": helpers.radio(int(mylar.PREFERRED_QUALITY), 1),
        "pref_qual_2": helpers.radio(int(mylar.PREFERRED_QUALITY), 2),
        "move_files": helpers.checked(mylar.MOVE_FILES),
        "rename_files": helpers.checked(mylar.RENAME_FILES),
        "folder_format": mylar.FOLDER_FORMAT,
        "file_format": mylar.FILE_FORMAT,
        "zero_level": helpers.checked(mylar.ZERO_LEVEL),
        "zero_level_n": mylar.ZERO_LEVEL_N,
        "add_to_csv": helpers.checked(mylar.ADD_TO_CSV),
        "cvinfo": helpers.checked(mylar.CVINFO),
        "lowercase_filenames": helpers.checked(mylar.LOWERCASE_FILENAMES),
        "syno_fix": helpers.checked(mylar.SYNO_FIX),
        "cvapifix": helpers.checked(mylar.CVAPIFIX),
        # notifications
        "prowl_enabled": helpers.checked(mylar.PROWL_ENABLED),
        "prowl_onsnatch": helpers.checked(mylar.PROWL_ONSNATCH),
        "prowl_keys": mylar.PROWL_KEYS,
        "prowl_priority": mylar.PROWL_PRIORITY,
        "nma_enabled": helpers.checked(mylar.NMA_ENABLED),
        "nma_apikey": mylar.NMA_APIKEY,
        "nma_priority": int(mylar.NMA_PRIORITY),
        "nma_onsnatch": helpers.checked(mylar.NMA_ONSNATCH),
        "pushover_enabled": helpers.checked(mylar.PUSHOVER_ENABLED),
        "pushover_onsnatch": helpers.checked(mylar.PUSHOVER_ONSNATCH),
        "pushover_apikey": mylar.PUSHOVER_APIKEY,
        "pushover_userkey": mylar.PUSHOVER_USERKEY,
        "pushover_priority": mylar.PUSHOVER_PRIORITY,
        "boxcar_enabled": helpers.checked(mylar.BOXCAR_ENABLED),
        "boxcar_username": mylar.BOXCAR_USERNAME,
        "boxcar_onsnatch": helpers.checked(mylar.BOXCAR_ONSNATCH),
        # scripts / metadata tagging
        "enable_extra_scripts": helpers.checked(mylar.ENABLE_EXTRA_SCRIPTS),
        "extra_scripts": mylar.EXTRA_SCRIPTS,
        "post_processing": helpers.checked(mylar.POST_PROCESSING),
        "enable_meta": helpers.checked(mylar.ENABLE_META),
        "cmtagger_path": mylar.CMTAGGER_PATH,
        # install / environment info
        "branch": version.MYLAR_VERSION,
        "br_type": mylar.INSTALL_TYPE,
        "br_version": mylar.versioncheck.getVersion(),
        "py_version": platform.python_version(),
        "data_dir": mylar.DATA_DIR,
        "prog_dir": mylar.PROG_DIR,
        "cache_dir": mylar.CACHE_DIR,
        "config_file": mylar.CONFIG_FILE,
        "branch_history": 'None',
        # "branch_history" : br_hist,
        "enable_pre_scripts": helpers.checked(mylar.ENABLE_PRE_SCRIPTS),
        "pre_scripts": mylar.PRE_SCRIPTS,
        "log_dir": mylar.LOG_DIR
    }
    return serve_template(templatename="config.html", title="Settings", config=config, comicinfo=comicinfo)
config.exposed = True
2013-01-11 21:20:51 +00:00
2013-03-29 04:02:35 +00:00
def error_change(self, comicid, errorgcd, comicname, comicyear, imported=None, mogcname=None):
    # Correct a mismatched comic: either force-match against a GCD-ID the
    # user supplied, or re-query ComicVine with a reworded series name.
    import urllib
    # A "," in the comic name will break the exceptions import - strip it.
    decoded = urllib.unquote_plus(comicname)
    # cname = decoded.decode("utf-8")
    cname = re.sub("\,", "", decoded.encode('utf-8'))

    ogcname = None
    if mogcname is not None:
        ogcname = urllib.unquote_plus(mogcname).encode('utf-8')

    if errorgcd[:5].isdigit():
        # Looks like a numeric GCD-ID - trust the user and force the match.
        logger.info("GCD-ID detected : " + str(errorgcd)[:5])
        logger.info("ogcname: " + str(ogcname))
        logger.info("I'm assuming you know what you're doing - going to force-match for " + cname)
        self.from_Exceptions(comicid=comicid, gcdid=errorgcd, comicname=cname,
                             comicyear=comicyear, imported=imported, ogcname=ogcname)
    else:
        # Otherwise treat the input as a corrected series title and re-add.
        logger.info("Assuming rewording of Comic - adjusting to : " + str(errorgcd))
        Err_Info = mylar.cv.getComic(comicid, 'comic')
        self.addComic(comicid=comicid, comicname=str(errorgcd),
                      comicyear=Err_Info['ComicYear'],
                      comicissues=Err_Info['ComicIssues'],
                      comicpublisher=Err_Info['ComicPublisher'])
error_change.exposed = True
2013-08-19 06:14:47 +00:00
def comic_config(self, com_location, ComicID, alt_search=None, fuzzy_year=None, comic_version=None, force_continuing=None):
    """Save the per-comic settings back to the comics table.

    com_location     -- filesystem path for the series (created if missing)
    ComicID          -- primary key of the comic row to update
    alt_search       -- alternate search term(s); '##'-delimited for multiples
    fuzzy_year       -- fuzzy year-matching option ("0" when unset)
    comic_version    -- volume label, must look like v<number>
    force_continuing -- flag to treat the series as still continuing
    """
    myDB = db.DBConnection()

    #--- this is for multiple search terms............
    #--- works, just need to redo search.py to accomodate multiple search terms
    ffs_alt = []
    # guard against None: alt_search is optional and '##' in None would raise.
    if alt_search is not None and '##' in alt_search:
        ffs = alt_search.find('##')
        ffs_alt.append(alt_search[:ffs])
        ffs_alt_st = str(ffs_alt[0])
        print("ffs_alt: " + str(ffs_alt[0]))
        ffs_test = alt_search.split('##')
        if len(ffs_test) > 0:
            print("ffs_test names: " + str(len(ffs_test)))
            ffs_count = len(ffs_test)
            n = 1
            while (n < ffs_count):
                ffs_alt.append(ffs_test[n])
                print("adding : " + str(ffs_test[n]))
                ffs_alt_st = str(ffs_alt_st) + "..." + str(ffs_test[n])
                n += 1
            asearch = ffs_alt
    else:
        asearch = alt_search

    # Multi-term support is deliberately disabled until search.py can consume
    # a list - store the raw string for now (None becomes "None" below).
    asearch = str(alt_search)

    controlValueDict = {'ComicID': ComicID}
    newValues = {"ComicLocation": com_location}

    if asearch is not None:
        if re.sub(r'\s', '', asearch) == '':
            newValues['AlternateSearch'] = "None"
        else:
            newValues['AlternateSearch'] = str(asearch)
    else:
        newValues['AlternateSearch'] = "None"

    if fuzzy_year is None:
        newValues['UseFuzzy'] = "0"
    else:
        newValues['UseFuzzy'] = str(fuzzy_year)

    if comic_version is None or comic_version == 'None':
        newValues['ComicVersion'] = "None"
    else:
        # version must be of the form v<digits>, e.g. v2
        if comic_version[1:].isdigit() and comic_version[:1].lower() == 'v':
            newValues['ComicVersion'] = str(comic_version)
        else:
            logger.info("Invalid Versioning entered - it must be in the format of v#")
            newValues['ComicVersion'] = "None"

    if force_continuing is None:
        newValues['ForceContinuing'] = 0
    else:
        newValues['ForceContinuing'] = 1

    # force the check/creation of directory com_location here
    if os.path.isdir(str(com_location)):
        logger.info(u"Validating Directory (" + str(com_location) + "). Already exists! Continuing...")
    else:
        logger.fdebug("Updated Directory doesn't exist! - attempting to create now.")
        filechecker.validateAndCreateDirectory(com_location, True)

    myDB.upsert("comics", newValues, controlValueDict)
    raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
comic_config.exposed = True
2013-05-25 06:18:00 +00:00
def readOptions(self, read2filename, storyarcdir):
    # Persist the reading-list options, then make sure the StoryArcs
    # directory exists whenever story-arc storage is switched on.
    mylar.READ2FILENAME = int(read2filename)
    mylar.STORYARCDIR = int(storyarcdir)
    mylar.config_write()
    # force the check/creation of the StoryArcs directory here
    if mylar.STORYARCDIR:
        arcdir = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs')
        if not os.path.isdir(str(arcdir)):
            logger.fdebug("Updated Directory doesn't exist! - attempting to create now.")
            filechecker.validateAndCreateDirectory(arcdir, True)
        else:
            logger.info(u"Validating Directory (" + str(arcdir) + "). Already exists! Continuing...")
readOptions.exposed = True
2012-09-13 15:27:34 +00:00
2013-01-15 17:32:08 +00:00
def configUpdate(self, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, launch_browser=0, logverbose=0, download_scan_interval=None, nzb_search_interval=None, nzb_startup_search=0, libraryscan_interval=None,
    use_sabnzbd=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, log_dir=None, log_level=0, blackhole=0, blackhole_dir=None,
    use_nzbget=0, nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None,
    usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_uid=None, dognzb_apikey=None, nzbx=0, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
    raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0,
    enable_meta=0, cmtagger_path=None, enable_rss=0, rss_checkinterval=None, enable_torrent_search=0, enable_kat=0, enable_cbt=0, cbt_passkey=None,
    enable_torrents=0, torrent_local=0, local_watchdir=None, torrent_seedbox=0, seedbox_watchdir=None, seedbox_user=None, seedbox_pass=None, seedbox_host=None, seedbox_port=None,
    prowl_enabled=0, prowl_onsnatch=0, prowl_keys=None, prowl_priority=None, nma_enabled=0, nma_apikey=None, nma_priority=0, nma_onsnatch=0, pushover_enabled=0, pushover_onsnatch=0, pushover_apikey=None, pushover_userkey=None, pushover_priority=None, boxcar_enabled=0, boxcar_username=None, boxcar_onsnatch=0,
    preferred_quality=0, move_files=0, rename_files=0, add_to_csv=1, cvinfo=0, lowercase_filenames=0, folder_format=None, file_format=None, enable_extra_scripts=0, extra_scripts=None, enable_pre_scripts=0, pre_scripts=None, post_processing=0, syno_fix=0, search_delay=None, chmod_dir=0777, chmod_file=0660, cvapifix=0,
    destination_dir=None, replace_spaces=0, replace_char=None, use_minsize=0, minsize=None, use_maxsize=0, maxsize=None, autowant_all=0, autowant_upcoming=0, comic_cover_local=0, zero_level=0, zero_level_n=None, interface=None, **kwargs):
    """Handler for the Settings form POST.

    Copies every submitted form field into the corresponding mylar module
    global, rebuilds the extra-newznab list from numbered kwargs
    (newznab_nameN/newznab_hostN/...), runs a few sanity checks, writes
    config.ini and redirects back to the config page.

    NOTE(review): form values arrive as strings; several checks below
    compare them numerically - confirm the intended types.
    """
    # --- straight form-field -> module-global copies ---
    mylar.HTTP_HOST = http_host
    mylar.HTTP_PORT = http_port
    mylar.HTTP_USERNAME = http_username
    mylar.HTTP_PASSWORD = http_password
    mylar.LAUNCH_BROWSER = launch_browser
    mylar.LOGVERBOSE = logverbose
    mylar.DOWNLOAD_SCAN_INTERVAL = download_scan_interval
    mylar.SEARCH_INTERVAL = nzb_search_interval
    mylar.NZB_STARTUP_SEARCH = nzb_startup_search
    mylar.LIBRARYSCAN_INTERVAL = libraryscan_interval
    mylar.SEARCH_DELAY = search_delay
    mylar.USE_SABNZBD = use_sabnzbd
    mylar.SAB_HOST = sab_host
    mylar.SAB_USERNAME = sab_username
    mylar.SAB_PASSWORD = sab_password
    mylar.SAB_APIKEY = sab_apikey
    mylar.SAB_CATEGORY = sab_category
    mylar.SAB_PRIORITY = sab_priority
    mylar.SAB_DIRECTORY = sab_directory
    mylar.USE_NZBGET = use_nzbget
    mylar.NZBGET_HOST = nzbget_host
    mylar.NZBGET_USERNAME = nzbget_username
    mylar.NZBGET_PASSWORD = nzbget_password
    mylar.NZBGET_PORT = nzbget_port
    mylar.NZBGET_CATEGORY = nzbget_category
    mylar.NZBGET_PRIORITY = nzbget_priority
    mylar.BLACKHOLE = blackhole
    mylar.BLACKHOLE_DIR = blackhole_dir
    mylar.USENET_RETENTION = usenet_retention
    mylar.NZBSU = nzbsu
    mylar.NZBSU_UID = nzbsu_uid
    mylar.NZBSU_APIKEY = nzbsu_apikey
    mylar.DOGNZB = dognzb
    mylar.DOGNZB_UID = dognzb_uid
    mylar.DOGNZB_APIKEY = dognzb_apikey
    mylar.NZBX = nzbx
    mylar.RAW = raw
    mylar.RAW_PROVIDER = raw_provider
    mylar.RAW_USERNAME = raw_username
    mylar.RAW_PASSWORD = raw_password
    mylar.RAW_GROUPS = raw_groups
    mylar.EXPERIMENTAL = experimental
    mylar.NEWZNAB = newznab
    # first newznab entry now lives in EXTRA_NEWZNABS (see below)
    #mylar.NEWZNAB_HOST = newznab_host
    #mylar.NEWZNAB_APIKEY = newznab_apikey
    #mylar.NEWZNAB_ENABLED = newznab_enabled
    mylar.ENABLE_RSS = int(enable_rss)
    mylar.RSS_CHECKINTERVAL = rss_checkinterval
    mylar.ENABLE_TORRENTS = int(enable_torrents)
    mylar.TORRENT_LOCAL = int(torrent_local)
    mylar.LOCAL_WATCHDIR = local_watchdir
    mylar.TORRENT_SEEDBOX = int(torrent_seedbox)
    mylar.SEEDBOX_WATCHDIR = seedbox_watchdir
    mylar.SEEDBOX_HOST = seedbox_host
    mylar.SEEDBOX_PORT = seedbox_port
    mylar.SEEDBOX_USER = seedbox_user
    mylar.SEEDBOX_PASS = seedbox_pass
    mylar.ENABLE_TORRENT_SEARCH = int(enable_torrent_search)
    mylar.ENABLE_KAT = int(enable_kat)
    mylar.ENABLE_CBT = int(enable_cbt)
    mylar.CBT_PASSKEY = cbt_passkey
    mylar.PREFERRED_QUALITY = int(preferred_quality)
    mylar.MOVE_FILES = move_files
    mylar.RENAME_FILES = rename_files
    mylar.REPLACE_SPACES = replace_spaces
    mylar.REPLACE_CHAR = replace_char
    mylar.ZERO_LEVEL = zero_level
    mylar.ZERO_LEVEL_N = zero_level_n
    mylar.ADD_TO_CSV = add_to_csv
    mylar.CVINFO = cvinfo
    mylar.LOWERCASE_FILENAMES = lowercase_filenames
    mylar.SYNO_FIX = syno_fix
    mylar.CVAPIFIX = cvapifix
    mylar.PROWL_ENABLED = prowl_enabled
    mylar.PROWL_ONSNATCH = prowl_onsnatch
    mylar.PROWL_KEYS = prowl_keys
    mylar.PROWL_PRIORITY = prowl_priority
    mylar.NMA_ENABLED = nma_enabled
    mylar.NMA_APIKEY = nma_apikey
    mylar.NMA_PRIORITY = nma_priority
    mylar.NMA_ONSNATCH = nma_onsnatch
    mylar.PUSHOVER_ENABLED = pushover_enabled
    mylar.PUSHOVER_APIKEY = pushover_apikey
    mylar.PUSHOVER_USERKEY = pushover_userkey
    mylar.PUSHOVER_PRIORITY = pushover_priority
    mylar.PUSHOVER_ONSNATCH = pushover_onsnatch
    mylar.BOXCAR_ENABLED = boxcar_enabled
    mylar.BOXCAR_USERNAME = boxcar_username
    mylar.BOXCAR_ONSNATCH = boxcar_onsnatch
    mylar.USE_MINSIZE = use_minsize
    mylar.MINSIZE = minsize
    mylar.USE_MAXSIZE = use_maxsize
    mylar.MAXSIZE = maxsize
    mylar.FOLDER_FORMAT = folder_format
    mylar.FILE_FORMAT = file_format
    mylar.DESTINATION_DIR = destination_dir
    mylar.AUTOWANT_ALL = autowant_all
    mylar.AUTOWANT_UPCOMING = autowant_upcoming
    mylar.COMIC_COVER_LOCAL = comic_cover_local
    mylar.INTERFACE = interface
    mylar.ENABLE_EXTRA_SCRIPTS = enable_extra_scripts
    mylar.EXTRA_SCRIPTS = extra_scripts
    mylar.ENABLE_PRE_SCRIPTS = enable_pre_scripts
    mylar.POST_PROCESSING = post_processing
    mylar.PRE_SCRIPTS = pre_scripts
    mylar.ENABLE_META = enable_meta
    mylar.CMTAGGER_PATH = cmtagger_path
    mylar.LOG_DIR = log_dir
    mylar.LOG_LEVEL = log_level
    mylar.CHMOD_DIR = chmod_dir
    mylar.CHMOD_FILE = chmod_file

    # Handle the variable config options. Note - keys with False values aren't getting passed
    mylar.EXTRA_NEWZNABS = []

    #changing this for simplicty - adding all newznabs into extra_newznabs
    if newznab_host is not None:
        #this
        mylar.EXTRA_NEWZNABS.append((newznab_name, newznab_host, newznab_apikey, newznab_uid, int(newznab_enabled)))

    # additional providers are posted as newznab_name2/newznab_host2/... -
    # rebuild the tuple list from the numbered kwargs.
    for kwarg in kwargs:
        if kwarg.startswith('newznab_name'):
            newznab_number = kwarg[12:]
            newznab_name = kwargs['newznab_name' + newznab_number]
            newznab_host = kwargs['newznab_host' + newznab_number]
            newznab_api = kwargs['newznab_api' + newznab_number]
            newznab_uid = kwargs['newznab_uid' + newznab_number]
            try:
                # unchecked checkboxes are simply absent from the POST
                newznab_enabled = int(kwargs['newznab_enabled' + newznab_number])
            except KeyError:
                newznab_enabled = 0
            mylar.EXTRA_NEWZNABS.append((newznab_name, newznab_host, newznab_api, newznab_uid, newznab_enabled))

    # Sanity checking
    # NOTE(review): these values come straight from the form and are likely
    # str, not int - a str/int '<' comparison will not behave numerically in
    # py2; confirm the intended types before relying on these floors.
    if mylar.SEARCH_INTERVAL < 360:
        logger.info("Search interval too low. Resetting to 6 hour minimum")
        mylar.SEARCH_INTERVAL = 360

    if mylar.SEARCH_DELAY < 1:
        logger.info("Minimum search delay set for 1 minute to avoid hammering.")
        mylar.SEARCH_DELAY = 1

    if mylar.RSS_CHECKINTERVAL < 20:
        logger.info("Minimum RSS Interval Check delay set for 20 minutes to avoid hammering.")
        mylar.RSS_CHECKINTERVAL = 20

    if not helpers.is_number(mylar.CHMOD_DIR):
        logger.info("CHMOD Directory value is not a valid numeric - please correct. Defaulting to 0777")
        mylar.CHMOD_DIR = '0777'

    if not helpers.is_number(mylar.CHMOD_FILE):
        logger.info("CHMOD File value is not a valid numeric - please correct. Defaulting to 0660")
        mylar.CHMOD_FILE = '0660'

    # NOTE(review): raises AttributeError when sab_host is None - presumably
    # the form always posts a value; confirm.
    if mylar.SAB_HOST.endswith('/'):
        logger.info("Auto-correcting trailing slash in SABnzbd url (not required)")
        mylar.SAB_HOST = mylar.SAB_HOST[:-1]

    if mylar.ENABLE_META:
        # default the tagger path, then strip any accidental program name
        if mylar.CMTAGGER_PATH is None or mylar.CMTAGGER_PATH == '':
            logger.info("ComicTagger Path not set - defaulting to Mylar Program Directory : " + mylar.PROG_DIR)
            mylar.CMTAGGER_PATH = mylar.PROG_DIR

        if 'comictagger.exe' in mylar.CMTAGGER_PATH.lower() or 'comictagger.py' in mylar.CMTAGGER_PATH.lower():
            mylar.CMTAGGER_PATH = re.sub(os.path.basename(mylar.CMTAGGER_PATH), '', mylar.CMTAGGER_PATH)
            logger.fdebug("Removed application name from ComicTagger path")

    # Write the config
    mylar.config_write()
    raise cherrypy.HTTPRedirect("config")
configUpdate.exposed = True
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
def SABtest(self):
    """Test connectivity to the configured SABnzbd instance.

    Queries SAB's get_config API with the stored API key, then checks
    whether the key the user supplied is the NZB key or the full API key.
    Results are reported via the log only; nothing is returned.
    """
    logger.info('testing SABnzbd connection')
    if not mylar.USE_SABNZBD:
        logger.info('You do not have anything stated for SAB Host. Please correct and try again.')
        return

    import urllib2
    from xml.dom.minidom import parseString
    #if user/pass given, we can auto-fill the API ;)
    if mylar.SAB_USERNAME is None or mylar.SAB_PASSWORD is None:
        logger.info('No Username / Password provided for SABnzbd credentials. Unable to auto-grab API key')
    logger.info('testing connection to SABnzbd @ ' + mylar.SAB_HOST)
    # NOTE(review): this logs the API key in clear text - consider masking it.
    logger.info('SAB API Key (FULL API KEY): ' + mylar.SAB_APIKEY)
    if mylar.SAB_HOST.endswith('/'):
        sabhost = mylar.SAB_HOST
    else:
        sabhost = mylar.SAB_HOST + '/'
    # BUGFIX: the parameter separator had been corrupted to the mojibake
    # character '§ion' (an HTML-escaped '&sect;ion'); the SABnzbd API
    # expects a literal '&section=misc' here.
    querysab = sabhost + "api?mode=get_config&section=misc&output=xml&apikey=" + mylar.SAB_APIKEY
    # 'response' instead of 'file' - avoid shadowing the builtin, and make
    # sure the handle is closed even if read() blows up.
    response = urllib2.urlopen(querysab)
    try:
        data = response.read()
    finally:
        response.close()
    dom = parseString(data)
    try:
        q_sabhost = dom.getElementsByTagName('host')[0].firstChild.wholeText
        q_nzbkey = dom.getElementsByTagName('nzb_key')[0].firstChild.wholeText
        q_apikey = dom.getElementsByTagName('api_key')[0].firstChild.wholeText
    except (IndexError, AttributeError):
        # expected tags missing -> SAB returned an <error> payload instead
        errorm = dom.getElementsByTagName('error')[0].firstChild.wholeText
        logger.error(u"Error detected attempting to retrieve SAB data : " + errorm)
        return
    #test which apikey provided
    if q_nzbkey != mylar.SAB_APIKEY:
        if q_apikey != mylar.SAB_APIKEY:
            logger.info('API KEY provided does not match with SABnzbd')
        else:
            logger.info('API KEY provided is FULL API KEY')
    else:
        logger.info('API KEY provided is NZB API KEY')
    logger.info('Connection to SABnzbd tested sucessfully')
SABtest.exposed = True
2012-09-13 15:27:34 +00:00
def shutdown(self):
    """Signal the main loop to shut Mylar down and render the countdown page."""
    mylar.SIGNAL = 'shutdown'
    message = 'Shutting Down...'
    # BUGFIX: dropped an unreachable 'return page' that followed this
    # return ('page' was never defined in this scope).
    return serve_template(templatename="shutdown.html", title="Shutting Down", message=message, timer=15)
shutdown.exposed = True
def restart(self):
    """Flag the main loop for a restart and show the countdown page."""
    mylar.SIGNAL = 'restart'
    return serve_template(
        templatename="shutdown.html",
        title="Restarting",
        message='Restarting...',
        timer=30,
    )
restart.exposed = True
def update(self):
    """Signal the main loop to self-update and render the countdown page."""
    mylar.SIGNAL = 'update'
    message = 'Updating...<br/><small>Main screen will appear in 60s</small>'
    # BUGFIX: dropped an unreachable 'return page' that followed this
    # return ('page' was never defined in this scope).
    return serve_template(templatename="shutdown.html", title="Updating", message=message, timer=30)
update.exposed = True
def getInfo(self, ComicID=None, IssueID=None):
    """Return cached metadata for the given comic/issue as a JSON string."""
    from mylar import cache
    return simplejson.dumps(cache.getInfo(ComicID, IssueID))
getInfo.exposed = True
def getComicArtwork(self, ComicID=None, imageURL=None):
    """Fetch (possibly cached) cover artwork for the given comic.

    Delegates to mylar.cache.getArtwork; returns whatever it produces.
    """
    from mylar import cache
    # BUGFIX: original logged undefined name 'comicID' (case mismatch with
    # the 'ComicID' parameter), raising NameError on every call.
    logger.info(u"Retrieving image for : " + ComicID)
    return cache.getArtwork(ComicID, imageURL)
getComicArtwork.exposed = True