Mirror of https://github.com/evilhero/mylar, synced 2025-03-09 21:33:42 +00:00
FIX: (#891) If a series on the watchlist appeared on the pull-list as a comp (i.e. #1-4), it would be assumed to be a valid issue and error out, FIX: Fixed an invalid cache location reference point
This commit is contained in:
commit b760c913f7 (parent 3a6332e122)
2 changed files with 27 additions and 6 deletions
@@ -86,7 +86,7 @@ def newpull():
             x+=1

     logger.fdebug('Saving new pull-list information into local file for subsequent merge')
-    except_file = '/home/hero/mylar/cache/newreleases.txt'
+    except_file = os.path.join(mylar.CACHE_DIR, 'newreleases.txt')
     try:
         csvfile = open(str(except_file), 'rb')
         csvfile.close()
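The pattern behind this half of the fix is to resolve cache files against the configured cache directory instead of a hardcoded absolute path. A minimal sketch of that pattern, assuming only that a cache-directory string is available (the directory and filename below are illustrative stand-ins for mylar.CACHE_DIR and the real cache contents):

    import os

    def cache_path(cache_dir, filename):
        # Build the location from the configured cache directory so the path
        # stays valid on any install, unlike '/home/hero/mylar/cache/...'.
        return os.path.join(cache_dir, filename)

    except_file = cache_path('/tmp/mylar-cache', 'newreleases.txt')
    print(except_file)   # /tmp/mylar-cache/newreleases.txt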
@@ -119,6 +119,10 @@ def pullit(forcecheck=None):
                'ONE SHOT',
                'PI']

+    #denotes issues that contain special characters within that would normally fail when checked if issue ONLY contained numerics.
+    #add freely, just lowercase and exclude decimals (they get stripped during comparisons)
+    specialissues = {'au','ai','inh','now'}
+
     pub = "COMICS"
     prevcomic = ""
     previssue = ""
@@ -129,7 +133,7 @@ def pullit(forcecheck=None):
         logger.info('[PULL-LIST] Populating & Loading pull-list data directly from webpage')
         newpull.newpull()
     else:
-        logger.info('[PULL-LIST] Populating & Loading pull-list data from file : ' + newrl)
+        logger.info('[PULL-LIST] Populating & Loading pull-list data from file')
         f = urllib.urlretrieve(PULLURL, newrl)

     #newtxtfile header info ("SHIPDATE\tPUBLISHER\tISSUE\tCOMIC\tEXTRA\tSTATUS\n")
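The else branch fetches the raw pull-list from PULLURL straight into the local cache file using Python 2's urllib.urlretrieve. A rough Python 3 equivalent of that download step, with a placeholder URL and cache path rather than mylar's real PULLURL and newrl values:

    import os
    import urllib.request

    def fetch_pull_list(url, target):
        # Download the weekly pull-list text file into the local cache so the
        # parser can work from a file on disk afterwards.
        os.makedirs(os.path.dirname(target), exist_ok=True)
        urllib.request.urlretrieve(url, target)

    # e.g. fetch_pull_list('https://example.com/newreleases.txt',
    #                      '/tmp/mylar-cache/newreleases.txt')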
@@ -232,7 +236,20 @@ def pullit(forcecheck=None):
                 if issname[n] == "PI":
                     issue = "NA"
                     break

+                #this is to ensure we don't get any comps added by removing them entirely (ie. #1-4, etc)
+                x = None
+                try:
+                    x = float( re.sub('#','', issname[n].strip()) )
+                except ValueError, e:
+                    if any(d in re.sub(r'[^a-zA-Z0-9]','',issname[n]).strip() for d in specialissues):
+                        issue = issname[n]
+                    else:
+                        logger.fdebug('Comp issue set detected as : ' + str(issname[n]) + '. Ignoring.')
+                        issue = 'NA'
+                else:
+                    issue = issname[n]
+
                 if 'ongoing' not in issname[n-1].lower() and '(vu)' not in issname[n-1].lower():
                     #print ("issue found : " + issname[n])
                     comicend = n - 1
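This block is the core of the #891 fix: a pull-list entry such as '#1-4' is a compilation ('comp') of several issues rather than a single issue, so it must be ignored instead of being passed along as a valid issue number. The logic tries a float conversion first, then falls back to the special-suffix whitelist, and only then discards the entry as a comp. A standalone sketch of that decision in Python 3 syntax (the function name is illustrative, and unlike the raw snippet it lowercases the cleaned string before matching the lowercase whitelist entries):

    import re

    SPECIAL_ISSUES = {'au', 'ai', 'inh', 'now'}

    def classify_issue(token):
        # Return the token if it looks like a single issue, or 'NA' if it is
        # a comp such as '#1-4' that should be ignored.
        try:
            # A plain issue number ('#12', '3.1') converts cleanly to float.
            float(re.sub('#', '', token.strip()))
        except ValueError:
            # Not numeric: accept known special suffixes (AU, AI, INH, NOW);
            # anything else is treated as a comp and dropped.
            cleaned = re.sub(r'[^a-zA-Z0-9]', '', token).lower()
            if any(tag in cleaned for tag in SPECIAL_ISSUES):
                return token
            return 'NA'
        return token

    print(classify_issue('#12'))    # '#12'
    print(classify_issue('#27AU'))  # '#27AU'
    print(classify_issue('#1-4'))   # 'NA' - comp set, ignored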
@@ -585,7 +602,7 @@ def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurepull=None):
        if '+' in sqlsearch: sqlsearch = re.sub('\+', '%PLUS%', sqlsearch)
        sqlsearch = re.sub(r'\s', '%', sqlsearch)
        sqlsearch = sqlsearch + '%'
-       logger.fdebug("searchsql: " + sqlsearch)
+       #logger.fdebug("searchsql: " + sqlsearch)
        if futurepull is None:
            weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
        else:
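For context, the surrounding code builds the LIKE pattern from a watchlist series name: any '+' is substituted with a '%PLUS%' token, each whitespace character becomes a '%' wildcard, and a trailing '%' is appended before the parameterized query runs. A minimal sketch of that pattern-building step against an in-memory SQLite table (the table and columns are simplified stand-ins for mylar's weekly table):

    import re
    import sqlite3

    def like_pattern(series_name):
        # Substitute '+' first, then turn whitespace into '%' wildcards and
        # finish with a trailing wildcard so partial titles still match.
        pattern = re.sub(r'\+', '%PLUS%', series_name)
        pattern = re.sub(r'\s', '%', pattern)
        return pattern + '%'

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE weekly (COMIC TEXT, ISSUE TEXT)')
    conn.execute("INSERT INTO weekly VALUES ('BATMAN AND ROBIN', '23')")

    rows = conn.execute('SELECT COMIC, ISSUE FROM weekly WHERE COMIC LIKE (?)',
                        [like_pattern('Batman and Robin')]).fetchall()
    print(rows)   # [('BATMAN AND ROBIN', '23')]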
@@ -874,6 +891,10 @@ def checkthis(datecheck,datestatus,usedate):
         logger.fdebug('Store Date falls within acceptable range - series MATCH')
         valid_check = True
     elif int(datecheck) < int(usedate):
-        logger.fdebug('The issue date of issue was on ' + str(datecheck) + ' which is prior to ' + str(usedate))
-        valid_check = False
+        if datecheck == '00000000':
+            logger.fdebug('Issue date retrieved as : ' + str(datecheck) + '. This is unpopulated data on CV, which normally means it\'s a new issue and is awaiting data population.')
+            valid_check = True
+        else:
+            logger.fdebug('The issue date of issue was on ' + str(datecheck) + ' which is prior to ' + str(usedate))
+            valid_check = False
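The added branch separates "genuinely older than the week being checked" from "ComicVine has not populated the date yet", which is stored as '00000000' and should still count as a match. A compact sketch of that decision, assuming dates arrive as YYYYMMDD strings and collapsing the earlier "acceptable range" branch into a simple comparison (the function name and return value are illustrative):

    def date_is_valid(datecheck, usedate):
        # Dates are YYYYMMDD strings; '00000000' means the issue date is
        # unpopulated on ComicVine, which usually signals a brand-new issue.
        if int(datecheck) >= int(usedate):
            return True                    # store date within acceptable range
        if datecheck == '00000000':
            return True                    # unpopulated data - treat as valid
        return False                       # genuinely prior to usedate

    print(date_is_valid('20140305', '20140301'))  # True
    print(date_is_valid('00000000', '20140301'))  # True
    print(date_is_valid('20140201', '20140301'))  # False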