Mirror of https://github.com/evilhero/mylar (synced 2025-03-09 13:24:53 +00:00)
FIX:(#939) Redirect loop error when ALT_PULL is enabled and attempting to load the new weekly pull-list, FIX: When manually post-processing, the out-of-whack check would enter an endless loop if the Series Total was equal to the Have Total.
This commit is contained in:
parent e7b1cdf843
commit 946c3d48e1

2 changed files with 12 additions and 6 deletions
@@ -1592,7 +1592,7 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
         logger.fdebug('[DUPECHECK] Checking series for unrefreshed series syndrome (USS).')
         havechk = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
         if havechk:
-            if havechk['Have'] >= havechk['Total']:
+            if havechk['Have'] > havechk['Total']:
                 logger.info('[DUPECHECK] Series has invalid issue totals [' + str(havechk['Have']) + '/' + str(havechk['Total']) + '] Attempting to Refresh & continue post-processing this issue.')
                 cid.append(ComicID)
                 logger.fdebug('[DUPECHECK] ComicID: ' + str(ComicID))
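The `>=` → `>` change above is the endless-loop fix from the commit message: with `>=`, a series whose Have count merely equaled its Total was treated as out-of-whack and queued for a refresh, and since the refresh left the totals equal, post-processing re-entered the same check indefinitely. A minimal sketch of the corrected guard (`needs_refresh`, `have`, and `total` are illustrative stand-ins, not mylar code):

```python
# Stand-in for the out-of-whack totals check; 'have' and 'total' mirror
# havechk['Have'] and havechk['Total'] from the diff above.
def needs_refresh(have, total):
    # Only a Have count strictly greater than Total is invalid; equality just
    # means the series is complete. The old '>=' refreshed complete series
    # too, which looped straight back into this check.
    return have > total

assert needs_refresh(13, 12) is True    # invalid totals -> refresh
assert needs_refresh(12, 12) is False   # complete series -> no refresh (the fix)
assert needs_refresh(11, 12) is False   # still collecting -> no refresh
```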
@@ -1637,10 +1637,10 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):

         if mylar.DUPECONSTRAINT == 'filesize':
             if filesz <= dupsize:
-                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + filename)
+                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location'])
                 rtnval = "dupe"
             else:
-                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + dupchk['Location'])
+                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + filename)
                 rtnval = "write"

         else:
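This hunk swaps the two log arguments so each message names the file that is actually kept: on `filesz <= dupsize` the existing file at `dupchk['Location']` is retained (`rtnval = "dupe"`), otherwise the newly scanned `filename` wins (`rtnval = "write"`). A hedged sketch of that decision with illustrative names (only the size comparison and the `"dupe"`/`"write"` values come from the diff):

```python
# Sketch of the 'filesize' duplicate constraint as the corrected logs describe it.
def filesize_priority(filesz, dupsize, filename, existing_location):
    """Return ('dupe'|'write', retained_path), favouring the larger file."""
    if filesz <= dupsize:
        # New scan is no bigger than the existing copy: keep the existing file.
        return "dupe", existing_location
    # New scan is larger: keep the newly scanned file.
    return "write", filename

print(filesize_priority(100, 250, "new.cbz", "old.cbz"))  # ('dupe', 'old.cbz')
print(filesize_priority(300, 250, "new.cbz", "old.cbz"))  # ('write', 'new.cbz')
```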
@@ -23,8 +23,11 @@ def newpull():
     soup = BeautifulSoup (pageresponse)
     getthedate = soup.findAll("div", {"class": "Headline"})[0]
     #the date will be in the FIRST ahref
-    getdate_link = getthedate('a')[0]
-    newdates = getdate_link.findNext(text=True).strip()
+    try:
+        getdate_link = getthedate('a')[0]
+        newdates = getdate_link.findNext(text=True).strip()
+    except IndexError:
+        newdates = getthedate.findNext(text=True).strip()
     logger.fdebug('New Releases date detected as : ' + re.sub('New Releases For', '', newdates).strip())
     cntlinks = soup.findAll('tr')
     lenlinks = len(cntlinks)
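The try/except added here hardens the ALT_PULL date scrape: when the Headline div carries no `<a>` tag, `getthedate('a')[0]` raises IndexError, and the new fallback reads the headline's own text instead of killing the pull-list load. A self-contained sketch using bs4 (the HTML snippet is made up; mylar's real markup comes from the remote page):

```python
import re
from bs4 import BeautifulSoup

# Illustrative markup: a Headline div with no <a> inside, the failing case.
html = '<div class="Headline">New Releases For 03/09/2016</div>'
soup = BeautifulSoup(html, 'html.parser')
headline = soup.find_all('div', {'class': 'Headline'})[0]

try:
    # Usual layout: the date sits in the headline's first <a> tag.
    date_text = headline('a')[0].find_next(string=True).strip()
except IndexError:
    # The fix's fallback: no <a> present, so take the headline's text directly.
    date_text = headline.find_next(string=True).strip()

print(re.sub('New Releases For', '', date_text).strip())  # -> 03/09/2016
```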
@@ -43,7 +46,10 @@ def newpull():

     while (x < lenlinks):
         headt = cntlinks[x] #iterate through the hrefs pulling out only results.
-        if '?stockItemID=' in str(headt):
+        if 'STK669382' in str(headt):
+            x+=1
+            continue
+        elif '?stockItemID=' in str(headt):
             #914 - Dark Horse Comics
             #915 - DC Comics
             #916 - IDW Publishing
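The new branch short-circuits one specific stock item (`STK669382`) before the generic `?stockItemID=` handling, advancing the index and skipping that row entirely. A sketch of the loop shape with made-up rows (the real `cntlinks` come from `soup.findAll('tr')`, and the print is a stand-in for the real per-row parsing):

```python
# Made-up rows standing in for the <tr> elements scraped by newpull().
rows = [
    '<tr><a href="?stockItemID=914">Dark Horse Comics</a></tr>',
    '<tr><a href="?stockItemID=STK669382">problem row</a></tr>',
    '<tr><a href="?stockItemID=915">DC Comics</a></tr>',
]

x = 0
while x < len(rows):
    headt = rows[x]
    if 'STK669382' in headt:
        # The fix: skip this one item outright rather than feeding it to the
        # generic stockItemID handler below.
        x += 1
        continue
    elif '?stockItemID=' in headt:
        print('processing:', headt)  # stand-in for the real per-row parsing
    x += 1
```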