2012-09-13 15:27:34 +00:00
|
|
|
# This file is part of Mylar.
|
|
|
|
#
|
|
|
|
# Mylar is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Mylar is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
|
|
|
|
from __future__ import print_function
|
|
|
|
|
2012-12-27 15:04:03 +00:00
|
|
|
import sys
|
|
|
|
import fileinput
|
|
|
|
import csv
|
|
|
|
import getopt
|
|
|
|
import sqlite3
|
|
|
|
import urllib
|
|
|
|
import os
|
|
|
|
import time
|
2012-09-13 15:27:34 +00:00
|
|
|
import re
|
2013-05-09 02:22:47 +00:00
|
|
|
import datetime
|
2012-09-13 15:27:34 +00:00
|
|
|
|
|
|
|
import mylar
|
|
|
|
from mylar import db, updater, helpers, logger
|
|
|
|
|
2013-04-08 16:31:41 +00:00
|
|
|
def pullit(forcecheck=None):
    """Fetch the weekly comic release list and load it into the 'weekly' db table.

    Steps, in order:
      1. Read the ship date of the currently-stored pull list (recreating the
         'weekly' table from scratch if it is unreadable).
      2. Download the raw newreleases.txt feed into the cache directory.
      3. Parse each feed line into shipdate/publisher/issue/comic/extra fields,
         writing a cleaned tab-delimited file; skip header sections, reprints,
         posters and duplicate alt-cover entries.
      4. Drop and recreate the 'weekly' table, then bulk-insert the cleaned rows.
      5. Delete the temp cache files and run pullitcheck() to match the new
         list against the watchlist.

    forcecheck -- passed straight through to pullitcheck(); presumably forces a
                  re-check regardless of schedule (confirm against pullitcheck).
    Returns None. Side effects: network fetch, cache-file writes, and a full
    rebuild of the 'weekly' table in the Mylar database.
    """
    myDB = db.DBConnection()
    # Does the 'weekly' table exist yet?
    popit = myDB.select("SELECT count(*) FROM sqlite_master WHERE name='weekly' and type='table'")
    if popit:
        try:
            # Grab the ship date of the stored list to compare against the feed.
            pull_date = myDB.action("SELECT SHIPDATE from weekly").fetchone()
            logger.info(u"Weekly pull list present - checking if it's up-to-date..")
            if (pull_date is None):
                pulldate = '00000000'
            else:
                pulldate = pull_date['SHIPDATE']
        except (sqlite3.OperationalError, TypeError),msg:
            # Table exists but is unreadable (bad schema / corrupt data):
            # drop and recreate it empty, then force a refresh below.
            conn=sqlite3.connect(mylar.DB_FILE)
            c=conn.cursor()
            logger.info(u"Error Retrieving weekly pull list - attempting to adjust")
            c.execute('DROP TABLE weekly')
            c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)')
            pulldate = '00000000'
            logger.fdebug(u"Table re-created, trying to populate")
    else:
        logger.info(u"No pullist found...I'm going to try and get a new list now.")
        pulldate = '00000000'
    # '00000000' acts as the "no list stored yet" sentinel date.
    if pulldate is None: pulldate = '00000000'
    PULLURL = 'http://www.previewsworld.com/shipping/newreleases.txt'
    #PULLURL = 'http://www.previewsworld.com/Archive/GetFile/1/1/71/994/081512.txt'

    #Prepare the Substitute name switch for pulllist to comic vine conversion
    # substitutes.csv is a '|'-delimited file of (pull-list name, replacement
    # name) pairs; lines beginning with '#' are comments.
    substitutes = os.path.join(mylar.DATA_DIR,"substitutes.csv")
    if not os.path.exists(substitutes):
        logger.debug('no substitues.csv file located - not performing substitutions on weekly pull list')
        substitute_check = False
    else:
        substitute_check = True
        #shortrep is the name to be replaced, longrep the replacement
        shortrep=[]
        longrep=[]
        #open the file data
        with open(substitutes) as f:
            reader = csv.reader(f, delimiter='|')
            for row in reader:
                if not row[0].startswith('#'):
                    logger.fdebug("Substitutes file read : "+str(row))
                    shortrep.append(row[0])
                    longrep.append(row[1])
        # NOTE(review): redundant - the with-statement above already closed f.
        f.close()

    # Header / boilerplate lines from the raw feed that must not be parsed
    # as comic entries.
    not_these=['PREVIEWS',
               'Shipping',
               'Every Wednesday',
               'Please check with',
               'PREMIER PUBLISHERS',
               'BOOKS',
               'COLLECTIBLES',
               'MCFARLANE TOYS',
               'New Releases',
               'Upcoming Releases']

    # Entries whose EXTRA field contains any of these are reprints / variants
    # and are flagged as duplicates below.
    excludes=['2ND PTG',
              '3RD PTG',
              '4TH PTG',
              '5TH PTG',
              'NEW PTG',
              'POSTER',
              'COMBO PACK']

    # this checks for the following lists
    # first need to only look for checkit variables
    # (section-header keywords in the feed)
    checkit=['COMICS',
             'COMIC & GRAPHIC NOVELS',
             'IDW PUBLISHING',
             'MAGAZINES',
             'MERCHANDISE']

    #if COMICS is found, determine which publisher
    checkit2=['DC',
              'MARVEL',
              'DARK HORSE',
              'IMAGE']

    # used to determine type of comic (one shot, hardcover, tradeback, softcover, graphic novel)
    cmty=['HC',
          'TP',
          'GN',
          'SC',
          'ONE SHOT',
          'PI']

    pub = "COMICS"
    # prevcomic/previssue carry the last-written entry across loop iterations
    # so consecutive alt-cover duplicates can be skipped.
    prevcomic = ""
    previssue = ""

    #newtxtfile header info ("SHIPDATE\tPUBLISHER\tISSUE\tCOMIC\tEXTRA\tSTATUS\n")
    #STATUS denotes default status to be applied to pulllist in Mylar (default = Skipped)
    newrl = mylar.CACHE_DIR + "/newreleases.txt"
    f = urllib.urlretrieve(PULLURL, newrl)
    # local_file = open(newrl, "wb")
    # local_file.write(f.read())
    # local_file.close

    # Cleaned, tab-delimited version of the feed that gets loaded into the db.
    newfl = mylar.CACHE_DIR + "/Clean-newreleases.txt"
    newtxtfile = open(newfl, 'wb')

    # 'check' is a helper defined elsewhere in this module; presumably it
    # returns True when the given string appears in the downloaded file --
    # TODO confirm.
    if check(newrl, 'Service Unavailable'):
        logger.info('Retrieval site is offline at the moment.Aborting pull-list update amd will try again later.')
        pullitcheck(forcecheck=forcecheck)
    else:
        pass
    # NOTE(review): execution continues into the parse loop even after the
    # "site offline" branch above - a 'return' there looks intended; confirm.

    for i in open(newrl):
        if not i.strip():
            continue
        # MAGAZINES / MERCHANDISE mark the end of the comic sections.
        if 'MAGAZINES' in i: break
        if 'MERCHANDISE' in i: break
        for nono in not_these:
            if nono in i:
                #let's try and grab the date for future pull checks
                if i.startswith('Shipping') or i.startswith('New Releases') or i.startswith('Upcoming Releases'):
                    shipdatechk = i.split()
                    # The date token's position depends on the header wording.
                    if i.startswith('Shipping'):
                        shipdate = shipdatechk[1]
                    elif i.startswith('New Releases'):
                        shipdate = shipdatechk[3]
                    elif i.startswith('Upcoming Releases'):
                        shipdate = shipdatechk[3]
                    # Normalize m/d/yyyy -> yyyy-mm-dd (zero-padded).
                    sdsplit = shipdate.split('/')
                    mo = sdsplit[0]
                    dy = sdsplit[1]
                    if len(mo) == 1: mo = "0" + sdsplit[0]
                    if len(dy) == 1: dy = "0" + sdsplit[1]
                    shipdate = sdsplit[2] + "-" + mo + "-" + dy
                    shipdaterep = shipdate.replace('-', '')
                    pulldate = re.sub('-', '', str(pulldate))
                    #print ("shipdate: " + str(shipdaterep))
                    #print ("today: " + str(pulldate))
                    if pulldate == shipdaterep:
                        # Stored list already matches the feed - nothing to do.
                        logger.info(u"No new pull-list available - will re-check again in 24 hours.")
                        pullitcheck()
                        mylar.PULLNEW = 'no'
                        return
                    else:
                        logger.info(u"Preparing to update to the new listing.")
                break
        else:
            # for/else: reached only when no not_these keyword matched,
            # i.e. this line is a real entry (or a section header below).
            mylar.PULLNEW = 'yes'
            for yesyes in checkit:
                if yesyes in i:
                    if format(str(yesyes)) == 'COMICS':
                        for chkchk in checkit2:
                            flagged = "no"
                            if chkchk in i:
                                # Publisher header like "DC COMICS": keep it as
                                # the current publisher for following entries.
                                bl = i.split()
                                blchk = str(bl[0]) + " " + str(bl[1])
                                if chkchk in blchk:
                                    pub = format(str(chkchk)) + " COMICS"
                                    #print (pub)
                                    break
                            else:
                                if i.find("COMICS") < 1 and "GRAPHIC NOVELS" in i:
                                    pub = "COMICS"
                                    #print (pub)
                                    break
                                elif i.find("COMICS") > 12:
                                    #print ("comics word found in comic title")
                                    # "COMICS" deep in the line means it is part
                                    # of a series title, not a section header.
                                    flagged = "yes"
                                    break
                    else:
                        if i.find("COMIC") < 1 and "GRAPHIC NOVELS" in i:
                            pub = "COMICS"
                            break
                        else:
                            pub = format(str(yesyes))
                            #print (pub)
                            break
                    # flagged == "yes" falls through so the line is parsed as a
                    # comic entry by the for/else below.
                    if flagged == "no":
                        break
            else:
                # for/else: no section keyword matched - parse as a comic entry.
                dupefound = "no"
                if '#' in i:
                    # Standard issue entry: "<code> <title words> #<issue> ...".
                    issname = i.split()
                    #print (issname)
                    issnamec = len(issname)
                    n = 0
                    while (n < issnamec):
                        #find the issue
                        if '#' in (issname[n]):
                            if issname[n] == "PI":
                                issue = "NA"
                                break
                            issue = issname[n]
                            # 'ongoing'/'(vu)' markers sit between the title and
                            # the issue number - back the title end up one more.
                            if 'ongoing' not in issname[n-1].lower() and '(vu)' not in issname[n-1].lower():
                                #print ("issue found : " + issname[n])
                                comicend = n - 1
                            else:
                                comicend = n - 2
                            break
                        n+=1
                    # NOTE(review): 'issue' may carry over from a previous loop
                    # iteration here if the '#' token was never reached - confirm.
                    if issue == "": issue = 'NA'
                    #find comicname
                    # Tokens 1..comicend form the series title (token 0 is the
                    # Diamond order code).
                    comicnm = issname[1]
                    n = 2
                    while (n < comicend + 1):
                        comicnm = comicnm + " " + issname[n]
                        n+=1
                    #print ("Comicname: " + str(comicnm) )
                    #get remainder
                    # Everything after the issue number, up to the price ('$'),
                    # is the 'extra' info (variant notes, etc.).
                    comicrm = issname[comicend +2]
                    if '$' in comicrm:
                        comicrm="None"
                    n = (comicend + 3)
                    while (n < issnamec):
                        if '$' in (issname[n]):
                            break
                        comicrm = str(comicrm) + " " + str(issname[n])
                        n+=1
                    #print ("Comic Extra info: " + str(comicrm) )
                    #print ("ship: " + str(shipdate))
                    #print ("pub: " + str(pub))
                    #print ("issue: " + str(issue))
                    #--let's make sure we don't wipe out decimal issues ;)
#                    if '.' in issue:
#                        issue_decimal = re.compile(r'[^\d.]+')
#                        issue = issue_decimal.sub('', str(issue))
#                    else: issue = re.sub('#','', issue)
                    issue = re.sub('#','', issue)
                    #issue = re.sub("\D", "", str(issue))
                    #store the previous comic/issue for comparison to filter out duplicate issues/alt covers
                    #print ("Previous Comic & Issue: " + str(prevcomic) + "--" + str(previssue))
                    dupefound = "no"
                else:
                    #if it doesn't have a '#' in the line, then we know it's either
                    #a special edition of some kind, or a non-comic
                    issname = i.split()
                    #print (issname)
                    issnamec = len(issname)
                    n = 1
                    issue = ''
                    while (n < issnamec):
                        #find the type of non-issue (TP,HC,GN,SC,OS,PI etc)
                        for cm in cmty:
                            if "ONE" in issue and "SHOT" in issname[n+1]: issue = "OS"
                            if cm == (issname[n]):
                                if issname[n] == 'PI':
                                    issue = 'NA'
                                    break
                                issue = issname[n]
                                #print ("non-issue found : " + issue)
                                comicend = n - 1
                                break
                        n+=1
                    #if the comic doesn't have an issue # or a keyword, adjust.
                    #set it to 'NA' and it'll be filtered out anyways.
                    if issue == "" or issue is None:
                        issue = 'NA'
                        comicend = n - 1 #comicend = comicend - 1 (adjustment for nil)
                    #find comicname
                    comicnm = issname[1]
                    n = 2
                    while (n < comicend + 1):
                        #stupid - this errors out if the array mistakingly goes to far.
                        try:
                            comicnm = comicnm + " " + issname[n]
                        except IndexError:
                            #print ("went too far looking at this comic...adjusting.")
                            comicnm = comicnm
                            break
                        n+=1
                    #print ("Comicname: " + str(comicnm) )
                    #get remainder
                    if len(issname) <= (comicend + 2):
                        comicrm = "None"
                    else:
                        #print ("length:" + str(len(issname)))
                        #print ("end:" + str(comicend + 2))
                        comicrm = issname[comicend +2]
                    if '$' in comicrm:
                        comicrm="None"
                    n = (comicend + 3)
                    while (n < issnamec):
                        if '$' in (issname[n]) or 'PI' in (issname[n]):
                            break
                        comicrm = str(comicrm) + " " + str(issname[n])
                        n+=1
                    #print ("Comic Extra info: " + str(comicrm) )
                    if "NA" not in issue and issue != "":
                        #print ("shipdate:" + str(shipdate))
                        #print ("pub: " + str(pub))
                        #print ("issue: " + str(issue))
                        dupefound = "no"
                #--start duplicate comic / issue chk
                # pullist has shortforms of a series' title sometimes and causes problems
                if 'O/T' in comicnm:
                    comicnm = re.sub('O/T', 'OF THE', comicnm)

                if substitute_check == True:
                    #Step through the list - storing an index
                    for repindex,repcheck in enumerate(shortrep):
                        if len(comicnm) >= len(repcheck):
                            #if the leftmost chars match the short text then replace them with the long text
                            if comicnm[:len(repcheck)]==repcheck:
                                logger.info("Switch worked on "+comicnm + " replacing " + str(repcheck) + " with " + str(longrep[repindex]))
                                comicnm = re.sub(repcheck, longrep[repindex], comicnm)

                for excl in excludes:
                    if excl in str(comicrm):
                        #duplicate comic / issue detected - don't add...
                        dupefound = "yes"
                if prevcomic == str(comicnm) and previssue == str(issue):
                    #duplicate comic/issue detected - don't add...
                    dupefound = "yes"
                #--end duplicate chk
                if (dupefound != "yes") and ('NA' not in str(issue)):
                    # Tab-delimited row; 'Skipped' is the default Mylar status.
                    newtxtfile.write(str(shipdate) + '\t' + str(pub) + '\t' + str(issue) + '\t' + str(comicnm) + '\t' + str(comicrm) + '\tSkipped' + '\n')
                prevcomic = str(comicnm)
                previssue = str(issue)
    logger.info(u"Populating the NEW Weekly Pull list into Mylar.")
    newtxtfile.close()

    # Rebuild the 'weekly' table from the cleaned file via a direct sqlite
    # connection (bypassing the db.DBConnection wrapper used above).
    mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")

    connection = sqlite3.connect(str(mylardb))
    cursor = connection.cursor()

    cursor.executescript('drop table if exists weekly;')

    cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text);")
    connection.commit()

    csvfile = open(newfl, "rb")
    creader = csv.reader(csvfile, delimiter='\t')
    t=1

    for row in creader:
        # NOTE(review): row is a list of fields, so these test whole-field
        # equality (e.g. a field exactly equal to "MERCHANDISE") - confirm.
        if "MERCHANDISE" in row: break
        if "MAGAZINES" in row: break
        if "BOOK" in row: break
        #print (row)
        try:
            logger.debug("Row: %s" % row)
            cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?,null);", row)
        except Exception, e:
            #print ("Error - invald arguments...-skipping")
            # NOTE(review): silently skips any malformed row; the exception is
            # never logged - consider at least a debug log here.
            pass
        # NOTE(review): 't' is incremented but never read afterwards.
        t+=1
    csvfile.close()
    connection.commit()
    connection.close()
    logger.info(u"Weekly Pull List successfully loaded.")
    #let's delete the files
    pullpath = str(mylar.CACHE_DIR) + "/"
    os.remove( str(pullpath) + "Clean-newreleases.txt" )
    os.remove( str(pullpath) + "newreleases.txt" )
    # Match the freshly-loaded list against the watchlist.
    pullitcheck(forcecheck=forcecheck)
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lock up occasionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a separate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
|
|
|
def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepull=None):
|
|
|
|
if futurepull is None:
|
|
|
|
logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
|
|
|
|
else:
|
|
|
|
logger.info('Checking the Future Releases list for upcoming comics I am watching for...')
|
2012-10-16 08:16:29 +00:00
|
|
|
myDB = db.DBConnection()
|
|
|
|
|
2012-09-13 15:27:34 +00:00
|
|
|
not_t = ['TP',
|
|
|
|
'NA',
|
|
|
|
'HC',
|
|
|
|
'PI']
|
|
|
|
|
|
|
|
not_c = ['PTG',
|
|
|
|
'COMBO PACK',
|
|
|
|
'(PP #']
|
|
|
|
|
|
|
|
lines = []
|
|
|
|
unlines = []
|
|
|
|
llen = []
|
|
|
|
ccname = []
|
2013-01-18 09:18:31 +00:00
|
|
|
pubdate = []
|
2012-09-13 15:27:34 +00:00
|
|
|
w = 0
|
2014-02-26 19:48:50 +00:00
|
|
|
wc = 0
|
2012-09-13 15:27:34 +00:00
|
|
|
tot = 0
|
|
|
|
chkout = []
|
|
|
|
watchfnd = []
|
|
|
|
watchfndiss = []
|
|
|
|
watchfndextra = []
|
2014-02-26 19:48:50 +00:00
|
|
|
alternate = []
|
2012-09-13 15:27:34 +00:00
|
|
|
|
|
|
|
#print ("----------WATCHLIST--------")
|
|
|
|
a_list = []
|
|
|
|
b_list = []
|
|
|
|
comicid = []
|
|
|
|
|
2012-09-26 20:53:08 +00:00
|
|
|
mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
|
|
|
|
|
|
|
|
con = sqlite3.connect(str(mylardb))
|
2012-09-13 15:27:34 +00:00
|
|
|
|
|
|
|
with con:
|
|
|
|
|
|
|
|
cur = con.cursor()
|
2013-01-02 16:57:28 +00:00
|
|
|
# if it's a one-off check (during an add series), load the comicname here and ignore below.
|
|
|
|
if comic1off_name:
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("this is a one-off" + comic1off_name)
|
2013-01-02 16:57:28 +00:00
|
|
|
lines.append(comic1off_name.strip())
|
|
|
|
unlines.append(comic1off_name.strip())
|
|
|
|
comicid.append(comic1off_id)
|
|
|
|
w = 1
|
|
|
|
else:
|
|
|
|
#let's read in the comic.watchlist from the db here
|
2014-02-26 19:48:50 +00:00
|
|
|
cur.execute("SELECT ComicID, ComicName, ComicYear, ComicPublisher, ComicPublished, LatestDate, ForceContinuing, AlternateSearch from comics")
|
2013-01-02 16:57:28 +00:00
|
|
|
while True:
|
|
|
|
watchd = cur.fetchone()
|
2013-01-11 21:20:51 +00:00
|
|
|
#print ("watchd: " + str(watchd))
|
|
|
|
if watchd is None:
|
2013-01-02 16:57:28 +00:00
|
|
|
break
|
2013-08-19 06:14:47 +00:00
|
|
|
if 'Present' in watchd[4] or (helpers.now()[:4] in watchd[4]) or watchd[6] == 1:
|
2013-05-09 02:22:47 +00:00
|
|
|
# this gets buggered up when series are named the same, and one ends in the current
|
|
|
|
# year, and the new series starts in the same year - ie. Avengers
|
|
|
|
# lets' grab the latest issue date and see how far it is from current
|
|
|
|
# anything > 45 days we'll assume it's a false match ;)
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("ComicName: " + watchd[1])
|
2013-05-09 02:22:47 +00:00
|
|
|
latestdate = watchd[5]
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("latestdate: " + str(latestdate))
|
|
|
|
if latestdate[8:] == '':
|
|
|
|
logger.fdebug("invalid date " + str(latestdate) + " appending 01 for day for continuation.")
|
|
|
|
latest_day = '01'
|
|
|
|
else:
|
|
|
|
latest_day = latestdate[8:]
|
|
|
|
c_date = datetime.date(int(latestdate[:4]),int(latestdate[5:7]),int(latest_day))
|
2013-05-09 02:22:47 +00:00
|
|
|
n_date = datetime.date.today()
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("c_date : " + str(c_date) + " ... n_date : " + str(n_date))
|
2013-05-09 02:22:47 +00:00
|
|
|
recentchk = (n_date - c_date).days
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("recentchk: " + str(recentchk) + " days")
|
|
|
|
chklimit = helpers.checkthepub(watchd[0])
|
|
|
|
logger.fdebug("Check date limit set to : " + str(chklimit))
|
|
|
|
logger.fdebug(" ----- ")
|
2013-08-19 06:14:47 +00:00
|
|
|
if recentchk < int(chklimit) or watchd[6] == 1:
|
|
|
|
if watchd[6] == 1:
|
|
|
|
logger.fdebug('Forcing Continuing Series enabled for series...')
|
2013-05-09 02:22:47 +00:00
|
|
|
# let's not even bother with comics that are in the Present.
|
|
|
|
a_list.append(watchd[1])
|
|
|
|
b_list.append(watchd[2])
|
|
|
|
comicid.append(watchd[0])
|
|
|
|
pubdate.append(watchd[4])
|
|
|
|
lines.append(a_list[w].strip())
|
|
|
|
unlines.append(a_list[w].strip())
|
2014-02-26 19:48:50 +00:00
|
|
|
w+=1 # we need to increment the count here, so we don't count the same comics twice (albeit with alternate names)
|
|
|
|
|
|
|
|
#here we load in the alternate search names for a series and assign them the comicid and
|
|
|
|
#alternate names
|
|
|
|
Altload = helpers.LoadAlternateSearchNames(watchd[7], watchd[0])
|
|
|
|
if Altload == 'no results':
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
wc = 0
|
|
|
|
alt_cid = Altload['ComicID']
|
|
|
|
n = 0
|
|
|
|
iscnt = Altload['Count']
|
|
|
|
while (n <= iscnt):
|
|
|
|
try:
|
|
|
|
altval = Altload['AlternateName'][n]
|
|
|
|
except IndexError:
|
|
|
|
break
|
|
|
|
cleanedname = altval['AlternateName']
|
|
|
|
a_list.append(altval['AlternateName'])
|
|
|
|
b_list.append(watchd[2])
|
|
|
|
comicid.append(alt_cid)
|
|
|
|
pubdate.append(watchd[4])
|
|
|
|
lines.append(a_list[w+wc].strip())
|
|
|
|
unlines.append(a_list[w+wc].strip())
|
2014-03-04 05:11:52 +00:00
|
|
|
logger.fdebug('loading in Alternate name for ' + str(cleanedname))
|
2014-02-26 19:48:50 +00:00
|
|
|
n+=1
|
|
|
|
wc+=1
|
|
|
|
w+=wc
|
|
|
|
|
|
|
|
#-- to be removed -
|
|
|
|
#print ( "Comic:" + str(a_list[w]) + " Year: " + str(b_list[w]) )
|
|
|
|
#if "WOLVERINE AND THE X-MEN" in str(a_list[w]): a_list[w] = "WOLVERINE AND X-MEN"
|
|
|
|
#lines.append(a_list[w].strip())
|
|
|
|
#unlines.append(a_list[w].strip())
|
|
|
|
#llen.append(a_list[w].splitlines())
|
|
|
|
#ccname.append(a_list[w].strip())
|
|
|
|
#tmpwords = a_list[w].split(None)
|
|
|
|
#ltmpwords = len(tmpwords)
|
|
|
|
#ltmp = 1
|
|
|
|
#-- end to be removed
|
2013-07-01 05:19:15 +00:00
|
|
|
else:
|
|
|
|
logger.fdebug("Determined to not be a Continuing series at this time.")
|
2012-09-13 15:27:34 +00:00
|
|
|
cnt = int(w-1)
|
|
|
|
cntback = int(w-1)
|
|
|
|
kp = []
|
|
|
|
ki = []
|
|
|
|
kc = []
|
|
|
|
otot = 0
|
2012-10-16 08:16:29 +00:00
|
|
|
|
2012-12-31 16:52:16 +00:00
|
|
|
logger.fdebug("You are watching for: " + str(w) + " comics")
|
2012-09-13 15:27:34 +00:00
|
|
|
#print ("----------THIS WEEK'S PUBLISHED COMICS------------")
|
|
|
|
if w > 0:
|
|
|
|
while (cnt > -1):
|
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
|
|
|
lines[cnt] = lines[cnt].upper()
|
2013-01-02 16:57:28 +00:00
|
|
|
#llen[cnt] = str(llen[cnt])
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("looking for : " + lines[cnt])
|
|
|
|
sqlsearch = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\'\?\@]', ' ', lines[cnt])
|
|
|
|
sqlsearch = re.sub("\&", '%', sqlsearch)
|
|
|
|
sqlsearch = re.sub("\\bAND\\b", '%', sqlsearch)
|
|
|
|
sqlsearch = re.sub("\\bTHE\\b", '', sqlsearch)
|
2013-01-28 20:31:43 +00:00
|
|
|
if '+' in sqlsearch: sqlsearch = re.sub('\+', '%PLUS%', sqlsearch)
|
2013-07-01 05:19:15 +00:00
|
|
|
sqlsearch = re.sub(r'\s', '%', sqlsearch)
|
2013-07-30 04:57:37 +00:00
|
|
|
sqlsearch = sqlsearch + '%'
|
2014-03-04 05:11:52 +00:00
|
|
|
#logger.fdebug("searchsql: " + sqlsearch)
|
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
|
|
|
if futurepull is None:
|
|
|
|
weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
|
|
|
|
else:
|
|
|
|
weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM future WHERE COMIC LIKE (?)', [sqlsearch])
|
2012-10-16 08:16:29 +00:00
|
|
|
#cur.execute('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [lines[cnt]])
|
|
|
|
for week in weekly:
|
|
|
|
if week == None:
|
2012-09-13 15:27:34 +00:00
|
|
|
break
|
|
|
|
for nono in not_t:
|
2012-10-16 08:16:29 +00:00
|
|
|
if nono in week['PUBLISHER']:
|
2013-05-04 01:57:19 +00:00
|
|
|
#logger.fdebug("nono present")
|
2013-01-01 20:09:28 +00:00
|
|
|
break
|
|
|
|
if nono in week['ISSUE']:
|
2013-02-16 18:30:22 +00:00
|
|
|
#logger.fdebug("graphic novel/tradeback detected..ignoring.")
|
2012-09-13 15:27:34 +00:00
|
|
|
break
|
|
|
|
for nothere in not_c:
|
2012-10-16 08:16:29 +00:00
|
|
|
if nothere in week['EXTRA']:
|
2013-02-16 18:30:22 +00:00
|
|
|
#logger.fdebug("nothere present")
|
2012-09-13 15:27:34 +00:00
|
|
|
break
|
|
|
|
else:
|
2012-10-16 08:16:29 +00:00
|
|
|
comicnm = week['COMIC']
|
2012-09-13 15:27:34 +00:00
|
|
|
#here's the tricky part, ie. BATMAN will match on
|
|
|
|
#every batman comic, not exact
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("comparing" + comicnm + "..to.." + unlines[cnt].upper())
|
2013-01-01 20:09:28 +00:00
|
|
|
|
|
|
|
#-NEW-
|
|
|
|
# strip out all special characters and compare
|
2013-07-01 05:19:15 +00:00
|
|
|
watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\'\?\@]', '', unlines[cnt])
|
|
|
|
comicnm = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\'\?\@]', '', comicnm)
|
|
|
|
if "THE" in watchcomic.upper() or "THE" in comicnm.upper():
|
|
|
|
modwatchcomic = re.sub("\\bTHE\\b", "", watchcomic.upper())
|
|
|
|
modcomicnm = re.sub("\\bTHE\\b", "", comicnm)
|
2013-02-11 16:58:41 +00:00
|
|
|
else:
|
|
|
|
modwatchcomic = watchcomic
|
|
|
|
modcomicnm = comicnm
|
2013-07-01 05:19:15 +00:00
|
|
|
if '&' in watchcomic.upper():
|
|
|
|
modwatchcomic = re.sub('\&', 'AND', modwatchcomic.upper())
|
|
|
|
modcomicnm = re.sub('\&', 'AND', modcomicnm)
|
|
|
|
if '&' in comicnm:
|
|
|
|
modwatchcom = re.sub('\&', 'AND', modwatchcomic.upper())
|
|
|
|
modcomicnm = re.sub('\&', 'AND', modcomicnm)
|
2013-01-28 20:31:43 +00:00
|
|
|
#thnx to A+X for this...
|
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
|
|
|
if '+' in watchcomic:
|
2013-04-26 03:45:02 +00:00
|
|
|
logger.fdebug("+ detected...adjusting.")
|
2013-05-04 01:57:19 +00:00
|
|
|
#logger.fdebug("comicnm:" + comicnm)
|
|
|
|
#logger.fdebug("watchcomic:" + watchcomic)
|
2013-04-26 03:45:02 +00:00
|
|
|
modwatchcomic = re.sub('\+', 'PLUS', modwatchcomic)
|
2013-05-04 01:57:19 +00:00
|
|
|
#logger.fdebug("modcomicnm:" + modcomicnm)
|
|
|
|
#logger.fdebug("modwatchcomic:" + modwatchcomic)
|
2013-07-01 05:19:15 +00:00
|
|
|
|
2013-07-30 04:57:37 +00:00
|
|
|
#annuals!
|
|
|
|
if 'ANNUAL' in comicnm.upper():
|
|
|
|
modcomicnm = re.sub("\\bANNUAL\\b", "", modcomicnm.upper())
|
|
|
|
|
2013-07-01 05:19:15 +00:00
|
|
|
watchcomic = re.sub(r'\s', '', watchcomic)
|
|
|
|
comicnm = re.sub(r'\s', '', comicnm)
|
|
|
|
modwatchcomic = re.sub(r'\s', '', modwatchcomic)
|
|
|
|
modcomicnm = re.sub(r'\s', '', modcomicnm)
|
|
|
|
logger.fdebug("watchcomic : " + str(watchcomic) + " / mod :" + str(modwatchcomic))
|
|
|
|
logger.fdebug("comicnm : " + str(comicnm) + " / mod :" + str(modcomicnm))
|
2014-02-26 19:48:50 +00:00
|
|
|
|
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
|
|
|
if comicnm == watchcomic.upper() or modcomicnm == modwatchcomic.upper():
|
2013-07-01 05:19:15 +00:00
|
|
|
logger.fdebug("matched on:" + comicnm + "..." + watchcomic.upper())
|
2013-02-16 18:30:22 +00:00
|
|
|
pass
|
2013-07-30 04:57:37 +00:00
|
|
|
# elif ("ANNUAL" in week['EXTRA']):
|
|
|
|
# pass
|
|
|
|
# print ( row[3] + " matched on ANNUAL")
|
2012-09-13 15:27:34 +00:00
|
|
|
else:
|
|
|
|
break
|
2014-02-26 19:48:50 +00:00
|
|
|
|
|
|
|
|
2012-10-16 08:16:29 +00:00
|
|
|
if ("NA" not in week['ISSUE']) and ("HC" not in week['ISSUE']):
|
|
|
|
if ("COMBO PACK" not in week['EXTRA']) and ("2ND PTG" not in week['EXTRA']) and ("3RD PTG" not in week['EXTRA']):
|
IMP: Removed unneeded code from several modules, IMP:(#659) Subdirectories now will be scanned in properly when in series directories, FIX:(#635)(#658) Moved code from html into module to allow for better integration and further templating needs, IMP: Added ability for mylar to auto-grab SABnzbd API when provided with user/pass, IMP: Test SABnzbd button works again - will auto-rollback to NZBKey usage, IMP: Added counts to Upcoming page, IMP: Added some more detailed parsing to the ImportResults, IMP: ImportResults will now show proper series title, IMP: Improved some logic when determining if a new weekly issue is on a watchlist if a rebooted series, IMP: A bunch of smaller fixes, and some various code tweaks...
2014-04-02 19:08:59 +00:00
|
|
|
|
|
|
|
#this all needs to get redone, so the ability to compare issue dates can be done systematically.
|
|
|
|
#Everything below should be in it's own function - at least the callable sections - in doing so, we can
|
|
|
|
#then do comparisons when two titles of the same name exist and are by definition 'current'. Issue date comparisons
|
|
|
|
#would identify the difference between two #1 titles within the same series year, but have different publishing dates.
|
|
|
|
#Wolverine (2013) & Wolverine (2014) are good examples of this situation.
|
|
|
|
#of course initially, the issue data for the newer series wouldn't have any issue data associated with it so it would be
|
|
|
|
#a null value, but given that the 2013 series (as an example) would be from 2013-05-01, it obviously wouldn't be a match to
|
|
|
|
#the current date & year (2014). Throwing out that, we could just assume that the 2014 would match the #1.
|
|
|
|
|
|
|
|
#get the issue number of the 'weeklypull' series.
|
|
|
|
#load in the actual series issue number's store-date (not publishing date)
|
|
|
|
#---use a function to check db, then return the results in a tuple/list to avoid db locks.
|
|
|
|
#if the store-date is >= weeklypull-list date then continue processing below.
|
|
|
|
#if the store-date is <= weeklypull-list date then break.
|
|
|
|
### week['ISSUE'] #issue # from pullist
|
|
|
|
### week['SHIPDATE'] #weeklypull-list date
|
|
|
|
### comicid[cnt] #comicid of matched series
|
|
|
|
|
|
|
|
## if it's a futurepull, the dates get mixed up when two titles exist of the same name
|
|
|
|
## ie. Wolverine-2011 & Wolverine-2014
|
|
|
|
## we need to set the compare date to today's date ( Now() ) in this case.
|
|
|
|
if futurepull:
|
|
|
|
usedate = datetime.datetime.now().strftime('%Y%m%d') #convert to yyyymmdd
|
|
|
|
else:
|
|
|
|
usedate = re.sub("[^0-9]", "", week['SHIPDATE'])
|
|
|
|
|
|
|
|
if 'ANNUAL' in comicnm.upper():
|
|
|
|
chktype = 'annual'
|
|
|
|
else:
|
|
|
|
chktype = 'series'
|
|
|
|
|
|
|
|
datevalues = loaditup(watchcomic, comicid[cnt], week['ISSUE'], chktype)
|
|
|
|
|
|
|
|
date_downloaded = None
|
|
|
|
altissuenum = None
|
|
|
|
|
|
|
|
if datevalues == 'no results':
|
|
|
|
#if a series is a .NOW on the pullist, it won't match up against anything (probably) on CV
|
|
|
|
#let's grab the digit from the .NOW, poll it against CV to see if there's any data
|
|
|
|
#if there is, check the store date to make sure it's a 'new' release.
|
|
|
|
#if it is a new release that has the same store date as the .NOW, then we assume
|
|
|
|
#it's the same, and assign it the AltIssueNumber to do extra searches.
|
|
|
|
if week['ISSUE'].isdigit() == False:
|
|
|
|
altissuenum = re.sub("[^0-9]", "", week['ISSUE']) # carry this through to get added to db later if matches
|
|
|
|
logger.fdebug('altissuenum is: ' + str(altissuenum))
|
|
|
|
altvalues = loaditup(watchcomic, comicid[cnt], altissuenum, chktype)
|
|
|
|
if altvalues == 'no results':
|
|
|
|
logger.fdebug('No alternate Issue numbering - something is probably wrong somewhere.')
|
|
|
|
pass
|
|
|
|
|
|
|
|
validcheck = checkthis(altvalues[0]['issuedate'], altvalues[0]['status'], usedate)
|
|
|
|
if validcheck == False:
|
|
|
|
if date_downloaded is None:
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
#logger.fdebug('issuedate:' + str(datevalues[0]['issuedate']))
|
|
|
|
#logger.fdebug('status:' + str(datevalues[0]['status']))
|
|
|
|
datestatus = datevalues[0]['status']
|
|
|
|
validcheck = checkthis(datevalues[0]['issuedate'], datestatus, usedate)
|
|
|
|
if validcheck == True:
|
|
|
|
if datestatus != 'Downloaded' and datestatus != 'Archived':
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
logger.fdebug('Issue #' + str(week['ISSUE']) + ' already downloaded.')
|
|
|
|
date_downloaded = datestatus
|
|
|
|
else:
|
|
|
|
if date_downloaded is None:
|
|
|
|
break
|
|
|
|
|
2012-09-13 15:27:34 +00:00
|
|
|
otot+=1
|
|
|
|
dontadd = "no"
|
|
|
|
if dontadd == "no":
|
|
|
|
#print (row[0], row[1], row[2])
|
|
|
|
tot+=1
|
2012-10-16 08:16:29 +00:00
|
|
|
#kp.append(row[0])
|
|
|
|
#ki.append(row[1])
|
|
|
|
#kc.append(comicnm)
|
2013-07-30 04:57:37 +00:00
|
|
|
if "ANNUAL" in comicnm.upper():
|
2012-09-13 15:27:34 +00:00
|
|
|
watchfndextra.append("annual")
|
2013-07-30 04:57:37 +00:00
|
|
|
ComicName = str(unlines[cnt]) + " Annual"
|
2012-09-13 15:27:34 +00:00
|
|
|
else:
|
2013-07-30 04:57:37 +00:00
|
|
|
ComicName = str(unlines[cnt])
|
2012-09-13 15:27:34 +00:00
|
|
|
watchfndextra.append("none")
|
|
|
|
watchfnd.append(comicnm)
|
2012-10-16 08:16:29 +00:00
|
|
|
watchfndiss.append(week['ISSUE'])
|
2012-09-13 15:27:34 +00:00
|
|
|
ComicID = comicid[cnt]
|
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
|
|
|
if not mylar.CV_ONLY:
|
|
|
|
ComicIssue = str(watchfndiss[tot -1] + ".00")
|
|
|
|
else:
|
|
|
|
ComicIssue = str(watchfndiss[tot -1])
|
2012-10-16 08:16:29 +00:00
|
|
|
ComicDate = str(week['SHIPDATE'])
|
2013-07-30 04:57:37 +00:00
|
|
|
#ComicName = str(unlines[cnt])
|
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
|
|
|
logger.fdebug("Watchlist hit for : " + ComicName + " ISSUE: " + str(watchfndiss[tot -1]))
|
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
|
|
|
|
|
|
|
if futurepull is None:
|
|
|
|
# here we add to comics.latest
|
|
|
|
updater.latest_update(ComicID=ComicID, LatestIssue=ComicIssue, LatestDate=ComicDate)
|
|
|
|
# here we add to upcoming table...
|
|
|
|
statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
|
|
|
|
else:
|
|
|
|
# here we add to upcoming table...
|
IMP: Removed unneeded code from several modules, IMP:(#659) Subdirectories now will be scanned in properly when in series directories, FIX:(#635)(#658) Moved code from html into module to allow for better integration and further templating needs, IMP: Added ability for mylar to auto-grab SABnzbd API when provided with user/pass, IMP: Test SABnzbd button works again - will auto-rollback to NZBKey usage, IMP: Added counts to Upcoming page, IMP: Added some more detailed parsing to the ImportResults, IMP: ImportResults will now show proper series title, IMP: Improved some logic when determining if a new weekly issue is on a watchlist if a rebooted series, IMP: A bunch of smaller fixes, and some various code tweaks...
2014-04-02 19:08:59 +00:00
|
|
|
statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck, futurepull='yes', altissuenumber=altissuenum)
|
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
|
|
|
|
2012-09-13 15:27:34 +00:00
|
|
|
# here we update status of weekly table...
|
2013-05-04 01:57:19 +00:00
|
|
|
if statusupdate is not None:
|
|
|
|
cstatus = statusupdate['Status']
|
|
|
|
cstatusid = statusupdate['ComicID']
|
|
|
|
else:
|
|
|
|
cstatus = None
|
|
|
|
cstatusid = None
|
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
|
|
|
#set the variable fp to denote updating the futurepull list ONLY
|
|
|
|
if futurepull is None:
|
|
|
|
fp = None
|
|
|
|
else:
|
|
|
|
cstatusid = ComicID
|
|
|
|
fp = "yes"
|
2014-03-19 19:07:25 +00:00
|
|
|
|
|
|
|
if date_downloaded is None:
|
IMP: Removed unneeded code from several modules, IMP:(#659) Subdirectories now will be scanned in properly when in series directories, FIX:(#635)(#658) Moved code from html into module to allow for better integration and further templating needs, IMP: Added ability for mylar to auto-grab SABnzbd API when provided with user/pass, IMP: Test SABnzbd button works again - will auto-rollback to NZBKey usage, IMP: Added counts to Upcoming page, IMP: Added some more detailed parsing to the ImportResults, IMP: ImportResults will now show proper series title, IMP: Improved some logic when determining if a new weekly issue is on a watchlist if a rebooted series, IMP: A bunch of smaller fixes, and some various code tweaks...
2014-04-02 19:08:59 +00:00
|
|
|
updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=cstatus, CID=cstatusid, futurepull=fp, altissuenumber=altissuenum)
|
2014-03-19 19:07:25 +00:00
|
|
|
else:
|
IMP: Removed unneeded code from several modules, IMP:(#659) Subdirectories now will be scanned in properly when in series directories, FIX:(#635)(#658) Moved code from html into module to allow for better integration and further templating needs, IMP: Added ability for mylar to auto-grab SABnzbd API when provided with user/pass, IMP: Test SABnzbd button works again - will auto-rollback to NZBKey usage, IMP: Added counts to Upcoming page, IMP: Added some more detailed parsing to the ImportResults, IMP: ImportResults will now show proper series title, IMP: Improved some logic when determining if a new weekly issue is on a watchlist if a rebooted series, IMP: A bunch of smaller fixes, and some various code tweaks...
2014-04-02 19:08:59 +00:00
|
|
|
updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=date_downloaded, CID=cstatusid, futurepull=fp, altissuenumber=altissuenum)
|
2012-09-13 15:27:34 +00:00
|
|
|
break
|
|
|
|
break
|
|
|
|
break
|
|
|
|
cnt-=1
|
|
|
|
#print ("-------------------------")
|
2013-01-01 20:09:28 +00:00
|
|
|
logger.fdebug("There are " + str(otot) + " comics this week to get!")
|
2012-09-13 15:27:34 +00:00
|
|
|
#print ("However I've already grabbed " + str(btotal) )
|
|
|
|
#print ("I need to get " + str(tot) + " comic(s)!" )
|
2013-01-15 17:32:08 +00:00
|
|
|
logger.info(u"Finished checking for comics on my watchlist.")
|
2012-10-16 08:16:29 +00:00
|
|
|
#con.close()
|
2012-09-13 15:27:34 +00:00
|
|
|
return
|
2013-12-11 18:18:13 +00:00
|
|
|
|
|
|
|
|
|
|
|
def check(fname, txt):
    """Return True if *txt* occurs as a substring of any line in file *fname*.

    The file is opened in text mode and closed automatically; the scan
    stops at the first matching line.
    """
    with open(fname) as handle:
        for current_line in handle:
            if txt in current_line:
                return True
        return False
|
2014-02-26 19:48:50 +00:00
|
|
|
|
|
|
|
|
2014-03-19 19:07:25 +00:00
|
|
|
def loaditup(comicname, comicid, issue, chktype):
    """Locate the store/publication date and status for an issue or annual,
    for comparative date analysis against the weekly pull-list.

    comicname : series title (used for logging only).
    comicid   : ComicID used to query the issues/annuals tables.
    issue     : issue number as it appears on the pull-list.
    chktype   : 'annual' to search the annuals table; anything else searches
                the issues table.

    Returns a one-element list of {'issuedate': <digits-only yyyymmdd>,
    'status': <Status>} on success, or the string 'no results' when no
    record or usable date exists.
    """
    myDB = db.DBConnection()
    issue_number = helpers.issuedigits(issue)
    if chktype == 'annual':
        typedisplay = 'annual issue'
        logger.fdebug('[' + comicname + '] trying to locate ' + str(typedisplay) + ' ' + str(issue) + ' to do comparitive issue analysis for pull-list')
        issueload = myDB.action('SELECT * FROM annuals WHERE ComicID=? AND Int_IssueNumber=?', [comicid, issue_number]).fetchone()
    else:
        typedisplay = 'issue'
        logger.fdebug('[' + comicname + '] trying to locate ' + str(typedisplay) + ' ' + str(issue) + ' to do comparitive issue analysis for pull-list')
        issueload = myDB.action('SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?', [comicid, issue_number]).fetchone()

    if issueload is None:
        logger.fdebug('No results matched for Issue number - either this is a NEW issue with no data yet, or something is wrong')
        return 'no results'

    dataissue = []
    releasedate = issueload['ReleaseDate']
    storedate = issueload['IssueDate']
    status = issueload['Status']

    if releasedate == '0000-00-00':
        # No valid store date on file - refresh the series to see whether a
        # valid date has since become available upstream.
        logger.fdebug('Store date of 0000-00-00 returned for ' + str(typedisplay) + ' # ' + str(issue) + '. Refreshing series to see if valid date present')
        mismatch = 'no'
        issuerecheck = mylar.importer.addComictoDB(comicid, mismatch, calledfrom='weekly', issuechk=issue_number, issuetype=chktype)
        if issuerecheck is not None:
            for il in issuerecheck:
                # this is only one record..
                # NOTE(review): the refreshed record swaps field meaning here
                # (IssueDate -> releasedate, ReleaseDate -> storedate) relative
                # to the table read above - confirm this asymmetry is intended.
                releasedate = il['IssueDate']
                storedate = il['ReleaseDate']
                status = il['Status']
                logger.fdebug('issue-recheck releasedate is : ' + str(releasedate))
                logger.fdebug('issue-recheck storedate of : ' + str(storedate))

    if releasedate is not None and releasedate != "None" and releasedate != "":
        logger.fdebug('Returning Release Date for ' + str(typedisplay) + ' # ' + str(issue) + ' of ' + str(releasedate))
        thedate = re.sub("[^0-9]", "", releasedate)  # convert date to numerics only (should be in yyyymmdd)
    else:
        logger.fdebug('Returning Publication Date for issue ' + str(typedisplay) + ' # ' + str(issue) + ' of ' + str(storedate))
        # FIX: the original condition used 'and' (storedate is None and
        # storedate != "None" and ...), which collapses to just 'is None';
        # a storedate of "None" or "" slipped through, re.sub produced an
        # empty string, and the later int() comparison in checkthis() would
        # crash. Use 'or' so any unusable storedate returns 'no results'.
        if storedate is None or storedate == "None" or storedate == "":
            logger.fdebug('no issue data available - both release date & store date. Returning no results')
            return 'no results'
        thedate = re.sub("[^0-9]", "", storedate)  # convert date to numerics only (should be in yyyymmdd)

    dataissue.append({"issuedate": thedate,
                      "status": status})

    return dataissue
|
IMP: Removed unneeded code from several modules, IMP:(#659) Subdirectories now will be scanned in properly when in series directories, FIX:(#635)(#658) Moved code from html into module to allow for better integration and further templating needs, IMP: Added ability for mylar to auto-grab SABnzbd API when provided with user/pass, IMP: Test SABnzbd button works again - will auto-rollback to NZBKey usage, IMP: Added counts to Upcoming page, IMP: Added some more detailed parsing to the ImportResults, IMP: ImportResults will now show proper series title, IMP: Improved some logic when determining if a new weekly issue is on a watchlist if a rebooted series, IMP: A bunch of smaller fixes, and some various code tweaks...
2014-04-02 19:08:59 +00:00
|
|
|
|
|
|
|
def checkthis(datecheck, datestatus, usedate):
    """Compare an issue's store date against the pull-list compare date.

    datecheck  : issue store date as a digits-only string (yyyymmdd).
    datestatus : current status of the issue (logged only).
    usedate    : compare date as a digits-only string (yyyymmdd).

    Returns True when the store date is on or after the compare date
    (series MATCH), otherwise False.
    """
    logger.fdebug('Now checking date comparison using an issue store date of ' + str(datecheck))
    logger.fdebug('Using a compare date (usedate) of ' + str(usedate))
    logger.fdebug('Status of ' + str(datestatus))

    # A single integer comparison decides the outcome; both branches of the
    # original if/elif are covered.
    valid_check = int(datecheck) >= int(usedate)
    if valid_check:
        logger.fdebug('Store Date falls within acceptable range - series MATCH')
    else:
        logger.fdebug('The issue date of issue was on ' + str(datecheck) + ' which is prior to ' + str(usedate))

    return valid_check
|