mirror of https://github.com/evilhero/mylar
IMP: Experimental search would not remove the word 'the' from series starting with it, resulting in non-hits for such series, IMP: Removed unnecessary print statements from solicit, IMP: If torrents were enabled but no torrent providers were enabled, would still check/cache rss feeds, IMP: Additional logging for weekly-pull copy to Weekly Download directory during post-processing
This commit is contained in:
parent
06d3773d21
commit
b6217b729b
|
@ -14,6 +14,8 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix):
|
|||
#searchName = "Uncanny Avengers"
|
||||
#searchIssue = "01"
|
||||
#searchYear = "2012"
|
||||
if searchName.lower().startswith('the '):
|
||||
searchName = searchName[4:]
|
||||
cName = searchName
|
||||
#clean up searchName due to webparse.
|
||||
searchName = searchName.replace("%20", " ")
|
||||
|
|
|
@ -35,7 +35,7 @@ def tehMain(forcerss=None):
|
|||
mylar.config_write()
|
||||
|
||||
#function for looping through nzbs/torrent feed
|
||||
if mylar.ENABLE_TORRENTS:
|
||||
if mylar.ENABLE_TORRENT_SEARCH: #and mylar.ENABLE_TORRENTS:
|
||||
logger.info('[RSS] Initiating Torrent RSS Check.')
|
||||
if mylar.ENABLE_KAT:
|
||||
logger.info('[RSS] Initiating Torrent RSS Feed Check on KAT.')
|
||||
|
|
|
@ -99,9 +99,9 @@ def solicit(month, year):
|
|||
while (x < lenlinks):
|
||||
headt = cntlinks[x] #iterate through the hrefs pulling out only results.
|
||||
if "/?page=article&id=" in str(headt):
|
||||
print ("titlet: " + str(headt))
|
||||
#print ("titlet: " + str(headt))
|
||||
headName = headt.findNext(text=True)
|
||||
print ('headName: ' + headName)
|
||||
#print ('headName: ' + headName)
|
||||
if 'Image' in headName: print 'IMAGE FOUND'
|
||||
if not all( ['Marvel' in headName, 'DC' in headName, 'Image' in headName] ) and ('Solicitations' in headName or 'Solicits' in headName):
|
||||
# test for month here (int(month) + 5)
|
||||
|
@ -117,7 +117,7 @@ def solicit(month, year):
|
|||
publishchk = False
|
||||
for pub in publishers:
|
||||
if pub in headName[:pubstart]:
|
||||
print 'publisher:' + str(publishers[pub])
|
||||
#print 'publisher:' + str(publishers[pub])
|
||||
publish.append(publishers[pub])
|
||||
publishchk = True
|
||||
break
|
||||
|
@ -127,7 +127,7 @@ def solicit(month, year):
|
|||
abc = headt.findAll('a', href=True)[0]
|
||||
ID_som = abc['href'] #first instance will have the right link...
|
||||
resultURL.append( ID_som )
|
||||
print '(' + str(cnt) + ') [ ' + publish[cnt] + '] Link URL: ' + resultURL[cnt]
|
||||
#print '(' + str(cnt) + ') [ ' + publish[cnt] + '] Link URL: ' + resultURL[cnt]
|
||||
cnt+=1
|
||||
|
||||
else:
|
||||
|
@ -148,16 +148,6 @@ def solicit(month, year):
|
|||
upcoming += populate(resultURL[loopthis], publish[loopthis], shipdate)
|
||||
loopthis -=1
|
||||
|
||||
## not needed.
|
||||
# month +=1 #increment month by 1
|
||||
# mnloop +=1 #increment loop by 1
|
||||
|
||||
# if month > 12: #failsafe failover for months
|
||||
# month = 1
|
||||
# year+=1
|
||||
#---
|
||||
|
||||
#print upcoming
|
||||
logger.info( str(len(upcoming)) + ' upcoming issues discovered.' )
|
||||
|
||||
newfl = mylar.CACHE_DIR + "/future-releases.txt"
|
||||
|
|
|
@ -885,6 +885,6 @@ def weekly_singlecopy(comicid, issuenum, file, path):
|
|||
logger.error('Could not copy ' + str(srcfile) + ' to ' + str(desfile))
|
||||
return
|
||||
|
||||
logger.debug('sucessfully copied to ' + desfile.encode('utf-8').strip() )
|
||||
logger.info('[WEEKLY-PULL] Sucessfully copied to ' + desfile.encode('utf-8').strip() )
|
||||
return
|
||||
|
||||
|
|
Loading…
Reference in New Issue