2012-09-13 15:27:34 +00:00
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
import time
2012-10-16 08:16:29 +00:00
import os , errno
2012-09-13 15:27:34 +00:00
import sys
import shlex
import datetime
2012-09-18 20:33:09 +00:00
import re
2012-10-16 08:16:29 +00:00
import urllib
2012-12-27 15:04:03 +00:00
import shutil
2013-02-22 08:35:51 +00:00
import sqlite3
import cherrypy
2012-09-13 15:27:34 +00:00
import mylar
2013-03-21 17:09:10 +00:00
from mylar import logger , helpers , db , mb , albumart , cv , parseit , filechecker , search , updater , moveit , comicbookdb
2012-09-13 15:27:34 +00:00
def is_exists(comicid):
    """Return True if the given ComicVine ComicID is already in the comics table.

    comicid -- ComicVine ComicID string to look up.

    Returns True when a matching row exists (and logs the comic's name),
    False otherwise.
    """
    myDB = db.DBConnection()
    # See if the comic is already in the database
    comiclist = myDB.select('SELECT ComicID, ComicName from comics WHERE ComicID=?', [comicid])
    if any(comicid in x for x in comiclist):
        logger.info(comiclist[0][1] + u" is already in the database.")
        # BUGFIX: previously returned False here as well, making the function
        # always return False, so callers could never detect a duplicate.
        return True
    else:
        return False
2013-02-09 03:34:02 +00:00
def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=None):
    """Fetch full series details from ComicVine/GCD and upsert the series plus
    all of its issues into the local database.

    comicid  -- ComicVine ComicID of the series to add or refresh.
    mismatch -- "yes" to resolve the series through the exceptions table
                (volume-spanning workaround); "no" or None for the normal
                GCD scrape.
    pullupd  -- when not None, the comic re-sort and the weekly pullist
                check are skipped (presumably set when called from the
                pull-list updater -- TODO confirm against callers).
    imported -- when set (and not the string 'None'), files from the mass
                importer are moved or archived into the series folder.
    ogcname  -- original comic name handed to moveit for mass imports.

    Returns the string "true" when the GCD scrape finds no match; otherwise
    returns None (early returns also occur on fetch/дir errors).
    """
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
    # from mylar import cache
    myDB = db.DBConnection()
    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage
    controlValueDict = {"ComicID": comicid}
    dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if dbcomic is None:
        # brand-new comic: placeholder name until the full details arrive
        newValueDict = {"ComicName": "Comic ID: %s" % (comicid),
                        "Status": "Loading"}
        comlocation = None
    else:
        # refresh of an existing comic: keep its current location
        newValueDict = {"Status": "Loading"}
        comlocation = dbcomic['ComicLocation']
    myDB.upsert("comics", newValueDict, controlValueDict)

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=comicid)

    # we need to lookup the info for the requested ComicID in full now
    comic = cv.getComic(comicid, 'comic')
    #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if not comic:
        logger.warn("Error fetching comic. ID for :" + comicid)
        if dbcomic is None:
            newValueDict = {"ComicName": "Fetch failed, try refreshing. (%s)" % (comicid),
                            "Status": "Active"}
        else:
            newValueDict = {"Status": "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return
    # strip a leading "The " for sorting purposes
    if comic['ComicName'].startswith('The '):
        sortname = comic['ComicName'][4:]
    else:
        sortname = comic['ComicName']
    logger.info(u"Now adding/updating: " + comic['ComicName'])
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    if mismatch == "no" or mismatch is None:
        gcdinfo = parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid)
        #print ("gcdinfo: " + str(gcdinfo))
        mismatch_com = "no"
        if gcdinfo == "No Match":
            updater.no_searchresults(comicid)
            nomatch = "true"
            logger.info(u"There was an error when trying to add " + comic['ComicName'] + " (" + comic['ComicYear'] + ")")
            return nomatch
        else:
            mismatch_com = "yes"
            #print ("gcdinfo:" + str(gcdinfo))
    elif mismatch == "yes":
        # series was flagged as a mismatch: resolve via the exceptions table instead
        CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
        if CV_EXcomicid['variloop'] is None: pass
        else:
            vari_loop = CV_EXcomicid['variloop']
            NewComicID = CV_EXcomicid['NewComicID']
            gcomicid = CV_EXcomicid['GComicID']
            resultURL = "/series/" + str(NewComicID) + "/"
            #print ("variloop" + str(CV_EXcomicid['variloop']))
            #if vari_loop == '99':
            gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None)

    logger.info(u"Sucessfully retrieved details for " + comic['ComicName'])
    # print ("Series Published" + parseit.resultPublished)

    #if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one.
    if comic['ComicYear'] is None or comic['ComicYear'] == '0000':
        SeriesYear = gcdinfo['SeriesYear']
    else:
        SeriesYear = comic['ComicYear']

    #let's do the Annual check here.
    if mylar.ANNUALS_ON:
        # pull annuals from comicbookdb and store them as skipped entries
        annuals = comicbookdb.cbdb(comic['ComicName'], SeriesYear)
        print ("Number of Annuals returned: " + str(annuals['totalissues']))
        nb = 0
        while (nb <= int(annuals['totalissues'])):
            try:
                annualval = annuals['annualslist'][nb]
            except IndexError:
                break
            # IssueID is synthesized from issue number + date (no native ID available)
            newCtrl = {"IssueID": str(annualval['AnnualIssue'] + annualval['AnnualDate'])}
            newVals = {"Issue_Number": annualval['AnnualIssue'],
                       "IssueDate": annualval['AnnualDate'],
                       "ComicID": comicid,
                       "Status": "skipped"}
            myDB.upsert("annuals", newVals, newCtrl)
            nb+=1
        #parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear)

    #comic book location on machine
    # setup default location here
    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = comic['ComicName']
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':', '')
            if '/' in comicdir:
                comicdir = comicdir.replace('/', '-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',', '')
        else: comicdir = u_comicname

        series = comicdir
        publisher = comic['ComicPublisher']
        year = SeriesYear
        #do work to generate folder path
        # substitution tokens for the user-configured folder format string
        values = {'$Series': series,
                  '$Publisher': publisher,
                  '$Year': year,
                  '$series': series.lower(),
                  '$publisher': publisher.lower(),
                  '$Volume': year
                  }
        #print mylar.FOLDER_FORMAT
        #print 'working dir:'
        #print helpers.replace_all(mylar.FOLDER_FORMAT, values)
        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + SeriesYear + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)
        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    #moved this out of the above loop so it will chk for existance of comlocation in case moved
    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        try:
            os.makedirs(str(comlocation))
            logger.info(u"Directory successfully created at: " + str(comlocation))
        except OSError:
            # non-fatal: processing continues even if the dir couldn't be made
            logger.error(u"Could not create comicdir : " + str(comlocation))

    #try to account for CV not updating new issues as fast as GCD
    #seems CV doesn't update total counts
    #comicIssues = gcdinfo['totalissues']
    if gcdinfo['gcdvariation'] == "cv":
        comicIssues = str(int(comic['ComicIssues']) + 1)
    else:
        comicIssues = comic['ComicIssues']

    #let's download the image...
    if os.path.exists(mylar.CACHE_DIR): pass
    else:
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))
        except OSError:
            logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg")

    #try:
    urllib.urlretrieve(str(comic['ComicImage']), str(coverfile))
    try:
        # opening the file verifies the download actually landed on disk
        with open(str(coverfile)) as f:
            # web-relative path served by the UI
            ComicImage = os.path.join('cache', str(comicid) + ".jpg")
            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"
            logger.info(u"Sucessfully retrieved cover for " + comic['ComicName'])
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage, comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    # NOTE(review): this raises AttributeError when ComicVersion is None -- verify
    if comic['ComicVersion'].isdigit():
        comicVol = "v" + comic['ComicVersion']
    else:
        comicVol = None

    # now write the full series record
    controlValueDict = {"ComicID": comicid}
    newValueDict = {"ComicName": comic['ComicName'],
                    "ComicSortName": sortname,
                    "ComicYear": SeriesYear,
                    "ComicImage": ComicImage,
                    "Total": comicIssues,
                    "ComicVersion": comicVol,
                    "ComicLocation": comlocation,
                    "ComicPublisher": comic['ComicPublisher'],
                    "ComicPublished": gcdinfo['resultPublished'],
                    "DateAdded": helpers.today(),
                    "Status": "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    issued = cv.getComic(comicid, 'issue')
    logger.info(u"Sucessfully retrieved issue details for " + comic['ComicName'])
    n = 0
    iscnt = int(comicIssues)
    issid = []
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + comic['ComicName'])
    # file check to see if issue exists
    logger.info(u"Checking directory for existing issues.")
    #fc = filechecker.listFiles(dir=comlocation, watchcomic=comic['ComicName'])
    #havefiles = 0

    #fccnt = int(fc['comiccount'])
    #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying")
    #fcnew = []
    if iscnt > 0: #if a series is brand new, it wont have any issues/details yet so skip this part
        while (n <= iscnt):
            #---NEW.code
            try:
                firstval = issued['issuechoice'][n]
            except IndexError:
                break
            cleanname = helpers.cleanName(firstval['Issue_Name'])
            issid = str(firstval['Issue_ID'])
            issnum = str(firstval['Issue_Number'])
            #print ("issnum: " + str(issnum))
            issname = cleanname
            # Normalize the CV issue number into an integer key (issis):
            # whole issues are scaled x1000 so decimals fit in the low digits.
            if '.' in str(issnum):
                issn_st = str(issnum).find('.')
                issn_b4dec = str(issnum)[:issn_st]
                #if the length of decimal is only 1 digit, assume it's a tenth
                dec_is = str(issnum)[issn_st + 1:]
                if len(dec_is) == 1:
                    dec_nisval = int(dec_is) * 10
                    iss_naftdec = str(dec_nisval)
                if len(dec_is) == 2:
                    dec_nisval = int(dec_is)
                    iss_naftdec = str(dec_nisval)
                iss_issue = issn_b4dec + "." + iss_naftdec
                issis = (int(issn_b4dec) * 1000) + dec_nisval
            elif 'au' in issnum.lower():
                print ("au detected")
                stau = issnum.lower().find('au')
                issnum_au = issnum[:stau]
                print ("issnum_au: " + str(issnum_au))
                #account for Age of Ultron mucked up numbering
                issis = str(int(issnum_au) * 1000) + 'AU'
            else: issis = int(issnum) * 1000

            # scan GCD's issue list for the matching normalized number
            bb = 0
            while (bb <= iscnt):
                try:
                    gcdval = gcdinfo['gcdchoice'][bb]
                    #print ("gcdval: " + str(gcdval))
                except IndexError:
                    #account for gcd variation here
                    if gcdinfo['gcdvariation'] == 'gcd':
                        #logger.fdebug("gcd-variation accounted for.")
                        issdate = '0000-00-00'
                        int_issnum = int(issis / 1000)
                    break
                if 'nn' in str(gcdval['GCDIssue']):
                    #no number detected - GN, TP or the like
                    logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
                    updater.no_searchresults(comicid)
                    return
                elif 'au' in gcdval['GCDIssue'].lower():
                    #account for Age of Ultron mucked up numbering - this is in format of 5AU.00
                    gstau = gcdval['GCDIssue'].lower().find('au')
                    gcdis_au = gcdval['GCDIssue'][:gstau]
                    gcdis = str(int(gcdis_au) * 1000) + 'AU'
                elif '.' in str(gcdval['GCDIssue']):
                    #logger.fdebug("g-issue:" + str(gcdval['GCDIssue']))
                    issst = str(gcdval['GCDIssue']).find('.')
                    #logger.fdebug("issst:" + str(issst))
                    issb4dec = str(gcdval['GCDIssue'])[:issst]
                    #logger.fdebug("issb4dec:" + str(issb4dec))
                    #if the length of decimal is only 1 digit, assume it's a tenth
                    decis = str(gcdval['GCDIssue'])[issst + 1:]
                    #logger.fdebug("decis:" + str(decis))
                    if len(decis) == 1:
                        decisval = int(decis) * 10
                        issaftdec = str(decisval)
                    if len(decis) == 2:
                        decisval = int(decis)
                        issaftdec = str(decisval)
                    gcd_issue = issb4dec + "." + issaftdec
                    #logger.fdebug("gcd_issue:" + str(gcd_issue))
                    try:
                        gcdis = (int(issb4dec) * 1000) + decisval
                    except ValueError:
                        logger.error("This has no issue #'s for me to get - Either a Graphic Novel or one-shot. This feature to allow these will be added in the near future.")
                        updater.no_searchresults(comicid)
                        return
                elif 'au' in gcdval['GCDIssue'].lower():
                    # NOTE(review): unreachable -- the identical 'au' test above
                    # already catches these; looks like a leftover duplicate.
                    #account for Age of Ultron mucked up numbering
                    gstau = gcdval['GCDIssue'].lower().find('au')
                    gcdis_au = gcdval['GCDIssue'][:gstau]
                    gcdis = str(int(gcdis_au) * 1000) + 'AU'
                    print ("gcdis : " + str(gcdis))
                else:
                    gcdis = int(str(gcdval['GCDIssue'])) * 1000
                if gcdis == issis:
                    issdate = str(gcdval['GCDDate'])
                    if str(issis).isdigit():
                        int_issnum = int(gcdis / 1000)
                    else:
                        if 'au' in issis.lower():
                            int_issnum = str(int(gcdis[:-2]) / 1000) + 'AU'
                        else:
                            logger.error("this has an alpha-numeric in the issue # which I cannot account for. Get on github and log the issue for evilhero.")
                            return
                    #get the latest issue / date using the date.
                    if gcdval['GCDDate'] > latestdate:
                        latestiss = str(issnum)
                        latestdate = str(gcdval['GCDDate'])
                    break
                #bb = iscnt
                bb+=1
            #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate))
            #---END.NEW.
            # check if the issue already exists
            iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()
            # Only change the status & add DateAdded if the issue is already in the database
            if iss_exists is None:
                # NOTE(review): this writes into the dict that is rebuilt on the
                # next statement, so DateAdded appears to be discarded -- verify.
                newValueDict['DateAdded'] = helpers.today()
            controlValueDict = {"IssueID": issid}
            newValueDict = {"ComicID": comicid,
                            "ComicName": comic['ComicName'],
                            "IssueName": issname,
                            "Issue_Number": issnum,
                            "IssueDate": issdate,
                            "Int_IssueNumber": int_issnum
                            }
            if mylar.AUTOWANT_ALL:
                newValueDict['Status'] = "Wanted"
            elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
                newValueDict['Status'] = "Wanted"
            else:
                newValueDict['Status'] = "Skipped"

            if iss_exists:
                #print ("Existing status : " + str(iss_exists['Status']))
                # preserve the user's existing status on refresh
                newValueDict['Status'] = iss_exists['Status']

            try:
                myDB.upsert("issues", newValueDict, controlValueDict)
            except sqlite3.InterfaceError, e:
                #raise sqlite3.InterfaceError(e)
                logger.error("MAJOR error trying to get issue data, this is most likey a MULTI-VOLUME series and you need to use the custom_exceptions.csv file.")
                myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
                return
            n+=1

    # logger.debug(u"Updating comic cache for " + comic['ComicName'])
    # cache.getThumb(ComicID=issue['issueid'])

    # logger.debug(u"Updating cache for: " + comic['ComicName'])
    # cache.getThumb(ComicIDcomicid)

    # series fully loaded: flip status to Active and record latest issue/date
    controlValueStat = {"ComicID": comicid}
    newValueStat = {"Status": "Active",
                    "LatestIssue": latestiss,
                    "LatestDate": latestdate,
                    "LastUpdated": helpers.now()
                    }
    myDB.upsert("comics", newValueStat, controlValueStat)

    if mylar.CVINFO:
        # drop a cvinfo marker file with the ComicVine volume URL
        if not os.path.exists(comlocation + "/cvinfo"):
            with open(comlocation + "/cvinfo", "w") as text_file:
                text_file.write("http://www.comicvine.com/volume/49-" + str(comicid))

    logger.info(u"Updating complete for: " + comic['ComicName'])

    #move the files...if imported is not empty (meaning it's not from the mass importer.)
    if imported is None or imported == 'None':
        pass
    else:
        if mylar.IMP_MOVE:
            logger.info("Mass import - Move files")
            moveit.movefiles(comicid, comlocation, ogcname)
        else:
            logger.info("Mass import - Moving not Enabled. Setting Archived Status for import.")
            moveit.archivefiles(comicid, ogcname)

    #check for existing files...
    updater.forceRescan(comicid)

    if pullupd is None:
        # lets' check the pullist for anything at this time as well since we're here.
        # do this for only Present comics....
        if mylar.AUTOWANT_UPCOMING and 'Present' in gcdinfo['resultPublished']:
            logger.info(u"Checking this week's pullist for new issues of " + comic['ComicName'])
            updater.newpullcheck(comic['ComicName'], comicid)

    #here we grab issues that have been marked as wanted above...
    results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
    if results:
        logger.info(u"Attempting to grab wanted issues for : " + comic['ComicName'])

        for result in results:
            foundNZB = "none"
            # only search when at least one provider and the SAB host are configured
            if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
                foundNZB = search.searchforissue(result['IssueID'])
                if foundNZB == "yes":
                    updater.foundsearch(result['ComicID'], result['IssueID'])
    else: logger.info(u"No issues marked as wanted for " + comic['ComicName'])

    logger.info(u"Finished grabbing what I could.")
2012-10-16 08:16:29 +00:00
2013-03-26 07:50:00 +00:00
def GCDimport ( gcomicid , pullupd = None , imported = None , ogcname = None ) :
2012-10-16 08:16:29 +00:00
# this is for importing via GCD only and not using CV.
# used when volume spanning is discovered for a Comic (and can't be added using CV).
# Issue Counts are wrong (and can't be added).
# because Comicvine ComicID and GCD ComicID could be identical at some random point, let's distinguish.
# CV = comicid, GCD = gcomicid :) (ie. CV=2740, GCD=G3719)
gcdcomicid = gcomicid
myDB = db . DBConnection ( )
# We need the current minimal info in the database instantly
# so we don't throw a 500 error when we redirect to the artistPage
controlValueDict = { " ComicID " : gcdcomicid }
2013-01-07 19:26:10 +00:00
comic = myDB . action ( ' SELECT ComicName, ComicYear, Total, ComicPublished, ComicImage, ComicLocation, ComicPublisher FROM comics WHERE ComicID=? ' , [ gcomicid ] ) . fetchone ( )
2012-10-16 08:16:29 +00:00
ComicName = comic [ 0 ]
ComicYear = comic [ 1 ]
ComicIssues = comic [ 2 ]
2013-01-18 09:18:31 +00:00
ComicPublished = comic [ 3 ]
2012-10-16 08:16:29 +00:00
comlocation = comic [ 5 ]
2013-01-07 19:26:10 +00:00
ComicPublisher = comic [ 6 ]
2012-10-16 08:16:29 +00:00
#ComicImage = comic[4]
#print ("Comic:" + str(ComicName))
newValueDict = { " Status " : " Loading " }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
# we need to lookup the info for the requested ComicID in full now
#comic = cv.getComic(comicid,'comic')
if not comic :
logger . warn ( " Error fetching comic. ID for : " + gcdcomicid )
if dbcomic is None :
newValueDict = { " ComicName " : " Fetch failed, try refreshing. ( %s ) " % ( gcdcomicid ) ,
" Status " : " Active " }
else :
newValueDict = { " Status " : " Active " }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
return
2013-03-26 07:50:00 +00:00
#run the re-sortorder here in order to properly display the page
if pullupd is None :
helpers . ComicSort ( comicorder = mylar . COMICSORT , imported = gcomicid )
2012-10-16 08:16:29 +00:00
if ComicName . startswith ( ' The ' ) :
sortname = ComicName [ 4 : ]
else :
sortname = ComicName
logger . info ( u " Now adding/updating: " + ComicName )
#--Now that we know ComicName, let's try some scraping
#--Start
# gcd will return issue details (most importantly publishing date)
comicid = gcomicid [ 1 : ]
resultURL = " /series/ " + str ( comicid ) + " / "
gcdinfo = parseit . GCDdetails ( comseries = None , resultURL = resultURL , vari_loop = 0 , ComicID = gcdcomicid , TotalIssues = ComicIssues , issvariation = None , resultPublished = None )
if gcdinfo == " No Match " :
logger . warn ( " No matching result found for " + ComicName + " ( " + ComicYear + " ) " )
updater . no_searchresults ( gcomicid )
nomatch = " true "
return nomatch
logger . info ( u " Sucessfully retrieved details for " + ComicName )
# print ("Series Published" + parseit.resultPublished)
#--End
ComicImage = gcdinfo [ ' ComicImage ' ]
#comic book location on machine
# setup default location here
if comlocation is None :
2013-03-08 01:36:36 +00:00
# let's remove the non-standard characters here.
2013-03-26 07:50:00 +00:00
u_comicnm = ComicName
2013-03-08 01:36:36 +00:00
u_comicname = u_comicnm . encode ( ' ascii ' , ' ignore ' ) . strip ( )
if ' : ' in u_comicname or ' / ' in u_comicname or ' , ' in u_comicname :
comicdir = u_comicname
2012-10-20 16:14:32 +00:00
if ' : ' in comicdir :
comicdir = comicdir . replace ( ' : ' , ' ' )
if ' / ' in comicdir :
comicdir = comicdir . replace ( ' / ' , ' - ' )
2012-10-31 16:03:15 +00:00
if ' , ' in comicdir :
comicdir = comicdir . replace ( ' , ' , ' ' )
2013-03-08 01:36:36 +00:00
else : comicdir = u_comicname
2013-01-07 19:26:10 +00:00
series = comicdir
publisher = ComicPublisher
year = ComicYear
#do work to generate folder path
values = { ' $Series ' : series ,
2013-01-14 05:12:59 +00:00
' $Publisher ' : publisher ,
' $Year ' : year ,
' $series ' : series . lower ( ) ,
' $publisher ' : publisher . lower ( ) ,
' $Volume ' : year
2013-01-07 19:26:10 +00:00
}
if mylar . FOLDER_FORMAT == ' ' :
comlocation = mylar . DESTINATION_DIR + " / " + comicdir + " ( " + comic [ ' ComicYear ' ] + " ) "
else :
comlocation = mylar . DESTINATION_DIR + " / " + helpers . replace_all ( mylar . FOLDER_FORMAT , values )
#comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
2012-10-16 08:16:29 +00:00
if mylar . DESTINATION_DIR == " " :
logger . error ( u " There is no general directory specified - please specify in Config/Post-Processing. " )
return
if mylar . REPLACE_SPACES :
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
comlocation = comlocation . replace ( ' ' , mylar . REPLACE_CHAR )
2013-03-26 07:50:00 +00:00
#if it doesn't exist - create it (otherwise will bugger up later on)
if os . path . isdir ( str ( comlocation ) ) :
logger . info ( u " Directory ( " + str ( comlocation ) + " ) already exists! Continuing... " )
else :
#print ("Directory doesn't exist!")
try :
os . makedirs ( str ( comlocation ) )
logger . info ( u " Directory successfully created at: " + str ( comlocation ) )
except OSError :
logger . error ( u " Could not create comicdir : " + str ( comlocation ) )
2012-10-17 15:57:51 +00:00
2012-10-16 08:16:29 +00:00
comicIssues = gcdinfo [ ' totalissues ' ]
#let's download the image...
if os . path . exists ( mylar . CACHE_DIR ) : pass
else :
#let's make the dir.
try :
os . makedirs ( str ( mylar . CACHE_DIR ) )
logger . info ( u " Cache Directory successfully created at: " + str ( mylar . CACHE_DIR ) )
2012-10-17 15:57:51 +00:00
except OSError :
logger . error ( u " Could not create cache dir : " + str ( mylar . CACHE_DIR ) )
2012-10-16 08:16:29 +00:00
2013-03-26 07:50:00 +00:00
coverfile = os . path . join ( mylar . CACHE_DIR , str ( gcomicid ) + " .jpg " )
2012-10-16 08:16:29 +00:00
2013-03-26 07:50:00 +00:00
#try:
2012-10-16 08:16:29 +00:00
urllib . urlretrieve ( str ( ComicImage ) , str ( coverfile ) )
try :
with open ( str ( coverfile ) ) as f :
2013-03-26 07:50:00 +00:00
ComicImage = os . path . join ( ' cache ' , str ( gcomicid ) + " .jpg " )
#this is for Firefox when outside the LAN...it works, but I don't know how to implement it
#without breaking the normal flow for inside the LAN (above)
#ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comi$
2013-03-08 01:36:36 +00:00
logger . info ( u " Sucessfully retrieved cover for " + ComicName )
2013-03-26 07:50:00 +00:00
#if the comic cover local is checked, save a cover.jpg to the series folder.
if mylar . COMIC_COVER_LOCAL :
comiclocal = os . path . join ( str ( comlocation ) + " /cover.jpg " )
shutil . copy ( ComicImage , comiclocal )
2012-10-16 08:16:29 +00:00
except IOError as e :
logger . error ( u " Unable to save cover locally at this time. " )
2013-03-26 07:50:00 +00:00
#if comic['ComicVersion'].isdigit():
# comicVol = "v" + comic['ComicVersion']
#else:
# comicVol = None
2012-10-16 08:16:29 +00:00
controlValueDict = { " ComicID " : gcomicid }
newValueDict = { " ComicName " : ComicName ,
" ComicSortName " : sortname ,
" ComicYear " : ComicYear ,
" Total " : comicIssues ,
" ComicLocation " : comlocation ,
2013-03-26 07:50:00 +00:00
#"ComicVersion": comicVol,
2012-10-16 08:16:29 +00:00
" ComicImage " : ComicImage ,
#"ComicPublisher": comic['ComicPublisher'],
#"ComicPublished": comicPublished,
" DateAdded " : helpers . today ( ) ,
" Status " : " Loading " }
myDB . upsert ( " comics " , newValueDict , controlValueDict )
2013-03-26 07:50:00 +00:00
#comicsort here...
#run the re-sortorder here in order to properly display the page
if pullupd is None :
helpers . ComicSort ( sequence = ' update ' )
2012-10-16 08:16:29 +00:00
logger . info ( u " Sucessfully retrieved issue details for " + ComicName )
n = 0
iscnt = int ( comicIssues )
issnum = [ ]
issname = [ ]
issdate = [ ]
int_issnum = [ ]
#let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
latestiss = " 0 "
latestdate = " 0000-00-00 "
#print ("total issues:" + str(iscnt))
#---removed NEW code here---
logger . info ( u " Now adding/updating issues for " + ComicName )
bb = 0
while ( bb < = iscnt ) :
#---NEW.code
try :
gcdval = gcdinfo [ ' gcdchoice ' ] [ bb ]
2012-10-16 15:53:46 +00:00
#print ("gcdval: " + str(gcdval))
2012-10-16 08:16:29 +00:00
except IndexError :
#account for gcd variation here
if gcdinfo [ ' gcdvariation ' ] == ' gcd ' :
#print ("gcd-variation accounted for.")
issdate = ' 0000-00-00 '
int_issnum = int ( issis / 1000 )
break
if ' nn ' in str ( gcdval [ ' GCDIssue ' ] ) :
#no number detected - GN, TP or the like
logger . warn ( u " Non Series detected (Graphic Novel, etc) - cannot proceed at this time. " )
updater . no_searchresults ( comicid )
return
elif ' . ' in str ( gcdval [ ' GCDIssue ' ] ) :
issst = str ( gcdval [ ' GCDIssue ' ] ) . find ( ' . ' )
issb4dec = str ( gcdval [ ' GCDIssue ' ] ) [ : issst ]
#if the length of decimal is only 1 digit, assume it's a tenth
decis = str ( gcdval [ ' GCDIssue ' ] ) [ issst + 1 : ]
if len ( decis ) == 1 :
decisval = int ( decis ) * 10
issaftdec = str ( decisval )
if len ( decis ) == 2 :
decisval = int ( decis )
issaftdec = str ( decisval )
if int ( issaftdec ) == 0 : issaftdec = " 00 "
gcd_issue = issb4dec + " . " + issaftdec
gcdis = ( int ( issb4dec ) * 1000 ) + decisval
else :
gcdis = int ( str ( gcdval [ ' GCDIssue ' ] ) ) * 1000
gcd_issue = str ( gcdval [ ' GCDIssue ' ] )
#get the latest issue / date using the date.
int_issnum = int ( gcdis / 1000 )
issdate = str ( gcdval [ ' GCDDate ' ] )
issid = " G " + str ( gcdval [ ' IssueID ' ] )
if gcdval [ ' GCDDate ' ] > latestdate :
latestiss = str ( gcd_issue )
latestdate = str ( gcdval [ ' GCDDate ' ] )
#print("(" + str(bb) + ") IssueID: " + str(issid) + " IssueNo: " + str(gcd_issue) + " Date" + str(issdate) )
#---END.NEW.
# check if the issue already exists
2012-10-31 16:03:15 +00:00
iss_exists = myDB . action ( ' SELECT * from issues WHERE IssueID=? ' , [ issid ] ) . fetchone ( )
2012-10-16 08:16:29 +00:00
# Only change the status & add DateAdded if the issue is not already in the database
2012-10-31 16:03:15 +00:00
if iss_exists is None :
2012-10-16 08:16:29 +00:00
newValueDict [ ' DateAdded ' ] = helpers . today ( )
#adjust for inconsistencies in GCD date format - some dates have ? which borks up things.
if " ? " in str ( issdate ) :
issdate = " 0000-00-00 "
controlValueDict = { " IssueID " : issid }
newValueDict = { " ComicID " : gcomicid ,
" ComicName " : ComicName ,
" Issue_Number " : gcd_issue ,
" IssueDate " : issdate ,
" Int_IssueNumber " : int_issnum
}
#print ("issueid:" + str(controlValueDict))
#print ("values:" + str(newValueDict))
if mylar . AUTOWANT_ALL :
newValueDict [ ' Status ' ] = " Wanted "
2013-01-01 20:32:22 +00:00
elif issdate > helpers . today ( ) and mylar . AUTOWANT_UPCOMING :
newValueDict [ ' Status ' ] = " Wanted "
2012-10-16 08:16:29 +00:00
else :
newValueDict [ ' Status ' ] = " Skipped "
2012-10-21 15:30:26 +00:00
if iss_exists :
#print ("Existing status : " + str(iss_exists['Status']))
newValueDict [ ' Status ' ] = iss_exists [ ' Status ' ]
2012-10-16 08:16:29 +00:00
myDB . upsert ( " issues " , newValueDict , controlValueDict )
bb + = 1
# logger.debug(u"Updating comic cache for " + ComicName)
# cache.getThumb(ComicID=issue['issueid'])
# logger.debug(u"Updating cache for: " + ComicName)
# cache.getThumb(ComicIDcomicid)
controlValueStat = { " ComicID " : gcomicid }
newValueStat = { " Status " : " Active " ,
" LatestIssue " : latestiss ,
" LatestDate " : latestdate ,
" LastUpdated " : helpers . now ( )
}
myDB . upsert ( " comics " , newValueStat , controlValueStat )
2013-01-23 08:22:22 +00:00
if mylar . CVINFO :
if not os . path . exists ( comlocation + " /cvinfo " ) :
with open ( comlocation + " /cvinfo " , " w " ) as text_file :
2013-03-08 01:36:36 +00:00
text_file . write ( " http://www.comicvine.com/volume/49- " + str ( comicid ) )
2013-01-23 08:22:22 +00:00
2012-10-16 08:16:29 +00:00
logger . info ( u " Updating complete for: " + ComicName )
2013-03-26 07:50:00 +00:00
#move the files...if imported is not empty (meaning it's not from the mass importer.)
if imported is None or imported == ' None ' :
pass
else :
if mylar . IMP_MOVE :
logger . info ( " Mass import - Move files " )
moveit . movefiles ( gcomicid , comlocation , ogcname )
else :
logger . info ( " Mass import - Moving not Enabled. Setting Archived Status for import. " )
moveit . archivefiles ( gcomicid , ogcname )
#check for existing files...
updater . forceRescan ( gcomicid )
2013-01-02 16:57:28 +00:00
if pullupd is None :
2013-01-01 20:09:28 +00:00
# lets' check the pullist for anyting at this time as well since we're here.
2013-01-18 09:18:31 +00:00
if mylar . AUTOWANT_UPCOMING and ' Present ' in ComicPublished :
2013-03-08 01:36:36 +00:00
logger . info ( u " Checking this week ' s pullist for new issues of " + ComicName )
2013-01-06 08:51:44 +00:00
updater . newpullcheck ( comic [ ' ComicName ' ] , gcomicid )
2012-10-16 08:16:29 +00:00
2013-01-01 20:09:28 +00:00
#here we grab issues that have been marked as wanted above...
2012-10-16 08:16:29 +00:00
2013-01-01 20:09:28 +00:00
results = myDB . select ( " SELECT * FROM issues where ComicID=? AND Status= ' Wanted ' " , [ gcomicid ] )
if results :
logger . info ( u " Attempting to grab wanted issues for : " + ComicName )
2012-10-16 08:16:29 +00:00
2013-01-01 20:09:28 +00:00
for result in results :
foundNZB = " none "
2013-01-18 09:18:31 +00:00
if ( mylar . NZBSU or mylar . DOGNZB or mylar . EXPERIMENTAL or mylar . NEWZNAB or mylar . NZBX ) and ( mylar . SAB_HOST ) :
2013-01-01 20:09:28 +00:00
foundNZB = search . searchforissue ( result [ ' IssueID ' ] )
if foundNZB == " yes " :
updater . foundsearch ( result [ ' ComicID ' ] , result [ ' IssueID ' ] )
else : logger . info ( u " No issues marked as wanted for " + ComicName )
2012-10-16 08:16:29 +00:00
2013-01-01 20:09:28 +00:00
logger . info ( u " Finished grabbing what I could. " )
2012-10-16 08:16:29 +00:00