#  This file is part of Mylar.
#
#  Mylar is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  Mylar is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with Mylar.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function

# standard library
import csv
import fileinput
import getopt
import os
import re
import sqlite3
import sys
import time
import urllib

# project-local
import mylar
from mylar import db, updater, helpers, logger

def pullit(forcecheck=None):
    """
    Download this week's new-releases list from previewsworld.com, parse the
    comic entries out of it, reload the 'weekly' table with them, and then
    call pullitcheck() to flag any watched comics.

    forcecheck -- passed through to pullitcheck() (and from there to
                  updater.upcoming_update) to force a status re-check.
    """
    myDB = db.DBConnection()
    # does the 'weekly' table already exist?
    popit = myDB.select("SELECT count(*) FROM sqlite_master WHERE name='weekly' and type='table'")
    if popit:
        try:
            pull_date = myDB.action("SELECT SHIPDATE from weekly").fetchone()
            logger.info(u"Weekly pull list present - checking if it's up-to-date..")
            pulldate = pull_date['SHIPDATE']
        except (sqlite3.OperationalError, TypeError) as msg:
            # table exists but is unreadable/incomplete -- rebuild it
            conn = sqlite3.connect(mylar.DB_FILE)
            c = conn.cursor()
            logger.info(u"Error Retrieving weekly pull list - attempting to adjust")
            c.execute('DROP TABLE weekly')
            c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text)')
            pulldate = '00000000'
    else:
        logger.info(u"No pullist found...I'm going to try and get a new list now.")
        pulldate = '00000000'
    if pulldate is None: pulldate = '00000000'

    PULLURL = 'http://www.previewsworld.com/shipping/newreleases.txt'
    #PULLURL = 'http://www.previewsworld.com/Archive/GetFile/1/1/71/994/081512.txt'

    # boilerplate/header lines in the feed that are never comic entries
    not_these = ['PREVIEWS',
                 'Shipping',
                 'Every Wednesday',
                 'Please check with',
                 'PREMIER PUBLISHERS',
                 'BOOKS',
                 'COLLECTIBLES',
                 'MCFARLANE TOYS',
                 'New Releases',
                 'Upcoming Releases']

    # variant/reprint markers -- entries carrying these get dropped as dupes
    excludes = ['2ND PTG',
                '3RD PTG',
                '4TH PTG',
                '5TH PTG',
                'NEW PTG',
                'POSTER',
                'COMBO PACK']

    # this checks for the following lists
    # first need to only look for checkit variables (section headings)
    checkit = ['COMICS',
               'IDW PUBLISHING',
               'MAGAZINES',
               'MERCHANDISE']

    # if COMICS is found, determine which publisher
    checkit2 = ['DC',
                'MARVEL',
                'DARK HORSE',
                'IMAGE']

    # used to determine type of comic (one shot, hardcover, tradeback, softcover, graphic novel)
    cmty = ['HC',
            'TP',
            'GN',
            'SC',
            'ONE SHOT',
            'PI']

    pub = "COMICS"
    prevcomic = ""
    previssue = ""

    # newtxtfile header info ("SHIPDATE\tPUBLISHER\tISSUE\tCOMIC\tEXTRA\tSTATUS\n")
    # STATUS denotes default status to be applied to pulllist in Mylar (default = Skipped)
    newrl = mylar.CACHE_DIR + "/newreleases.txt"
    f = urllib.urlretrieve(PULLURL, newrl)

    newfl = mylar.CACHE_DIR + "/Clean-newreleases.txt"
    newtxtfile = open(newfl, 'wb')

    for i in open(newrl):
        if not i.strip():
            continue
        # everything past these sections is of no interest
        if 'MAGAZINES' in i: break
        if 'MERCHANDISE' in i: break
        for nono in not_these:
            if nono in i:
                # boilerplate line -- but grab the ship date off it for
                # future pull checks before skipping it
                if i.startswith('Shipping') or i.startswith('New Releases') or i.startswith('Upcoming Releases'):
                    shipdatechk = i.split()
                    if i.startswith('Shipping'):
                        shipdate = shipdatechk[1]
                    elif i.startswith('New Releases'):
                        shipdate = shipdatechk[3]
                    elif i.startswith('Upcoming Releases'):
                        shipdate = shipdatechk[3]
                    # normalize m/d/yyyy -> yyyy-mm-dd (zero-padded)
                    sdsplit = shipdate.split('/')
                    mo = sdsplit[0]
                    dy = sdsplit[1]
                    if len(mo) == 1: mo = "0" + sdsplit[0]
                    if len(dy) == 1: dy = "0" + sdsplit[1]
                    shipdate = sdsplit[2] + "-" + mo + "-" + dy
                    shipdaterep = shipdate.replace('-', '')
                    pulldate = re.sub('-', '', str(pulldate))
                    if pulldate == shipdaterep:
                        logger.info(u"No new pull-list available - will re-check again in 24 hours.")
                        pullitcheck()
                        mylar.PULLNEW = 'no'
                        return
                    else:
                        logger.info(u"Preparing to update to the new listing.")
                break
        else:
            # no boilerplate marker matched: a heading or a comic entry
            mylar.PULLNEW = 'yes'
            for yesyes in checkit:
                if yesyes in i:
                    if format(str(yesyes)) == 'COMICS':
                        for chkchk in checkit2:
                            flagged = "no"
                            if chkchk in i:
                                bl = i.split()
                                blchk = str(bl[0]) + " " + str(bl[1])
                                if chkchk in blchk:
                                    pub = format(str(chkchk)) + " COMICS"
                                    break
                            else:
                                if i.find("COMICS") < 1 and "GRAPHIC NOVELS" in i:
                                    pub = "COMICS"
                                    break
                                elif i.find("COMICS") > 12:
                                    # 'COMICS' occurs inside a comic title,
                                    # not as a section heading
                                    flagged = "yes"
                                    break
                    else:
                        pub = format(str(yesyes))
                        break
                    if flagged == "no":
                        break
            else:
                # an actual comic entry for the current publisher section
                dupefound = "no"
                if '#' in i:
                    # regular numbered issue line
                    issname = i.split()
                    issnamec = len(issname)
                    n = 0
                    while (n < issnamec):
                        # find the issue token
                        if '#' in (issname[n]):
                            if issname[n] == "PI":
                                issue = "NA"
                                break
                            issue = issname[n]
                            if 'ongoing' not in issname[n-1].lower() and '(vu)' not in issname[n-1].lower():
                                comicend = n - 1
                            else:
                                # skip the '(ONGOING)'/'(VU)' marker preceding the issue #
                                comicend = n - 2
                            break
                        n+=1
                    if issue == "": issue = 'NA'
                    # reassemble the comic name from the leading tokens
                    comicnm = issname[1]
                    n = 2
                    while (n < comicend + 1):
                        comicnm = comicnm + " " + issname[n]
                        n+=1
                    # whatever sits between the issue # and the price is 'extra' info
                    comicrm = issname[comicend +2]
                    if '$' in comicrm:
                        comicrm="None"
                    n = (comicend + 3)
                    while (n < issnamec):
                        if '$' in (issname[n]):
                            break
                        comicrm = str(comicrm) + " " + str(issname[n])
                        n+=1
                    # --let's make sure we don't wipe out decimal issues ;)
                    if '.' in issue:
                        issue_decimal = re.compile(r'[^\d.]+')
                        issue = issue_decimal.sub('', str(issue))
                    else: issue = re.sub('#','', issue)
                    dupefound = "no"
                else:
                    # no '#' in the line: either a special edition of some
                    # kind (TP/HC/GN/..), or a non-comic
                    issname = i.split()
                    issnamec = len(issname)
                    n = 1
                    issue = ''
                    while (n < issnamec):
                        # find the type of non-issue (TP,HC,GN,SC,OS,PI etc)
                        for cm in cmty:
                            if "ONE" in issue and "SHOT" in issname[n+1]: issue = "OS"
                            if cm == (issname[n]):
                                if issname[n] == 'PI':
                                    issue = 'NA'
                                    break
                                issue = issname[n]
                                comicend = n - 1
                                break
                        n+=1
                    # if the comic doesn't have an issue # or a keyword, adjust:
                    # set it to 'NA' and it'll be filtered out anyways.
                    if issue == "" or issue is None:
                        issue = 'NA'
                        comicend = n - 1 #comicend = comicend - 1 (adjustment for nil)
                    # reassemble the comic name
                    comicnm = issname[1]
                    n = 2
                    while (n < comicend + 1):
                        # guard against running past the end of the token list
                        try:
                            comicnm = comicnm + " " + issname[n]
                        except IndexError:
                            comicnm = comicnm
                            break
                        n+=1
                    # remainder/extra info (if any) up to the price token
                    if len(issname) <= (comicend + 2):
                        comicrm = "None"
                    else:
                        comicrm = issname[comicend +2]
                    if '$' in comicrm:
                        comicrm="None"
                    n = (comicend + 3)
                    while (n < issnamec):
                        if '$' in (issname[n]) or 'PI' in (issname[n]):
                            break
                        comicrm = str(comicrm) + " " + str(issname[n])
                        n+=1
                    if "NA" not in issue and issue != "":
                        dupefound = "no"
                # --start duplicate comic / issue chk
                for excl in excludes:
                    if excl in str(comicrm):
                        # variant printing/poster/combo -- treat as duplicate
                        dupefound = "yes"
                if prevcomic == str(comicnm) and previssue == str(issue):
                    # duplicate comic/issue detected - don't add...
                    dupefound = "yes"
                # --end duplicate chk
                if (dupefound != "yes") and ('NA' not in str(issue)):
                    newtxtfile.write(str(shipdate) + '\t' + str(pub) + '\t' + str(issue) + '\t' + str(comicnm) + '\t' + str(comicrm) + '\tSkipped' + '\n')
                prevcomic = str(comicnm)
                previssue = str(issue)

    logger.info(u"Populating the NEW Weekly Pull list into Mylar.")
    newtxtfile.close()

    # reload the weekly table from the cleaned tab-delimited file
    mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
    connection = sqlite3.connect(str(mylardb))
    cursor = connection.cursor()
    cursor.executescript('drop table if exists weekly;')
    cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text);")
    connection.commit()

    csvfile = open(newfl, "rb")
    creader = csv.reader(csvfile, delimiter='\t')
    t=1
    for row in creader:
        if "MERCHANDISE" in row: break
        if "MAGAZINES" in row: break
        if "BOOK" in row: break
        try:
            cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?);", row)
        except Exception as e:
            # malformed row (wrong column count) -- skip it
            pass
        t+=1
    csvfile.close()
    connection.commit()
    connection.close()
    logger.info(u"Weekly Pull List successfully loaded.")

    # let's delete the cache files
    pullpath = str(mylar.CACHE_DIR) + "/"
    os.remove( str(pullpath) + "Clean-newreleases.txt" )
    os.remove( str(pullpath) + "newreleases.txt" )
    pullitcheck(forcecheck=forcecheck)
def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
    """
    Compare the 'weekly' (pull-list) table against the watchlist and flag
    any matches via the updater module.

    comic1off_name/comic1off_id -- when supplied, check only this single
        comic (one-off check during an add-series) instead of the whole
        watchlist.
    forcecheck -- passed through to updater.upcoming_update().
    """
    logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
    myDB = db.DBConnection()

    # publisher/issue markers that identify non-floppies (trades, hardcovers..)
    not_t = ['TP',
             'NA',
             'HC',
             'PI']

    # 'extra' markers that identify reprints/variants we don't want
    not_c = ['PTG',
             'COMBO PACK',
             '(PP #']

    lines = []
    unlines = []
    llen = []
    ccname = []
    pubdate = []
    w = 0
    tot = 0
    chkout = []
    watchfnd = []
    watchfndiss = []
    watchfndextra = []

    a_list = []
    b_list = []
    comicid = []

    mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
    con = sqlite3.connect(str(mylardb))

    with con:
        cur = con.cursor()
        # if it's a one-off check (during an add series), load the comicname
        # here and ignore the watchlist query below.
        if comic1off_name:
            logger.fdebug("this is a one-off" + str(comic1off_name))
            lines.append(comic1off_name.strip())
            unlines.append(comic1off_name.strip())
            comicid.append(comic1off_id)
            w = 1
        else:
            # let's read in the comic watchlist from the db
            cur.execute("SELECT ComicID, ComicName, ComicYear, ComicPublisher, ComicPublished from comics")
            while True:
                watchd = cur.fetchone()
                if watchd is None:
                    break
                if 'Present' in watchd[4] or (helpers.now()[:4] in watchd[4]):
                    # only bother with series still publishing ('Present')
                    # or published in the current year
                    a_list.append(watchd[1])
                    b_list.append(watchd[2])
                    comicid.append(watchd[0])
                    pubdate.append(watchd[4])
                    lines.append(a_list[w].strip())
                    unlines.append(a_list[w].strip())
                    llen.append(a_list[w].splitlines())
                    ccname.append(a_list[w].strip())
                    tmpwords = a_list[w].split(None)
                    ltmpwords = len(tmpwords)
                    ltmp = 1
                    w+=1
        cnt = int(w-1)
        cntback = int(w-1)
        kp = []
        ki = []
        kc = []
        otot = 0

        logger.fdebug("You are watching for: " + str(w) + " comics")
        if w > 0:
            # walk the watchlist backwards, matching each name against the
            # weekly table with a LIKE pattern
            while (cnt > -1):
                lines[cnt] = lines[cnt].upper()
                # build the LIKE pattern: special chars -> space -> '%'
                sqlsearch = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', ' ', lines[cnt])
                sqlsearch = re.sub(r'\s', '%', sqlsearch)
                if 'THE' in sqlsearch: sqlsearch = re.sub('THE', '', sqlsearch)
                if '+' in sqlsearch: sqlsearch = re.sub('\+', '%PLUS%', sqlsearch)
                weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
                for week in weekly:
                    if week == None:
                        break
                    for nono in not_t:
                        if nono in week['PUBLISHER']:
                            break
                        if nono in week['ISSUE']:
                            # graphic novel/tradeback detected..ignoring.
                            break
                        for nothere in not_c:
                            if nothere in week['EXTRA']:
                                break
                            else:
                                comicnm = week['COMIC']
                                # here's the tricky part: ie. BATMAN will match on
                                # every batman comic, not exact -- so strip out all
                                # special characters/whitespace and compare exactly
                                watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', '', unlines[cnt])
                                comicnm = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', '', comicnm)
                                watchcomic = re.sub(r'\s', '', watchcomic)
                                comicnm = re.sub(r'\s', '', comicnm)
                                if 'THE' in watchcomic.upper():
                                    modwatchcomic = re.sub('THE', '', watchcomic.upper())
                                    modcomicnm = re.sub('THE', '', comicnm)
                                else:
                                    modwatchcomic = watchcomic
                                    modcomicnm = comicnm
                                # thnx to A+X for this...
                                if '+' in watchcomic:
                                    logger.fdebug("+ detected...adjusting.")
                                    modwatchcomic = re.sub('\+', 'PLUS', modwatchcomic)
                                if comicnm == watchcomic.upper() or modcomicnm == modwatchcomic.upper():
                                    logger.fdebug("matched on:" + str(comicnm) + "..." + str(watchcomic).upper())
                                    pass
                                elif ("ANNUAL" in week['EXTRA']):
                                    pass
                                else:
                                    break
                                if ("NA" not in week['ISSUE']) and ("HC" not in week['ISSUE']):
                                    if ("COMBO PACK" not in week['EXTRA']) and ("2ND PTG" not in week['EXTRA']) and ("3RD PTG" not in week['EXTRA']):
                                        otot+=1
                                        dontadd = "no"
                                        if dontadd == "no":
                                            tot+=1
                                            if ("ANNUAL" in week['EXTRA']):
                                                watchfndextra.append("annual")
                                            else:
                                                watchfndextra.append("none")
                                            watchfnd.append(comicnm)
                                            watchfndiss.append(week['ISSUE'])
                                            ComicID = comicid[cnt]
                                            if not mylar.CV_ONLY:
                                                ComicIssue = str(watchfndiss[tot -1] + ".00")
                                            else:
                                                ComicIssue = str(watchfndiss[tot -1])
                                            ComicDate = str(week['SHIPDATE'])
                                            ComicName = str(unlines[cnt])
                                            logger.fdebug("Watchlist hit for : " + ComicName + " ISSUE: " + str(watchfndiss[tot -1]))
                                            # here we add to comics.latest
                                            updater.latest_update(ComicID=ComicID, LatestIssue=ComicIssue, LatestDate=ComicDate)
                                            # here we add to upcoming table...
                                            statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
                                            # here we update status of weekly table...
                                            if statusupdate is not None:
                                                cstatus = statusupdate['Status']
                                                cstatusid = statusupdate['ComicID']
                                            else:
                                                cstatus = None
                                                cstatusid = None
                                            updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=cstatus, CID=cstatusid)
                                            break
                                break
                        break
                cnt-=1

    logger.fdebug("There are " + str(otot) + " comics this week to get!")
    logger.info(u"Finished checking for comics on my watchlist.")
    return