FIX:(#349) Bulk Import would fail if Rename Files was selected (after the series was added), IMP: Added Download Status to the Weekly Pull List (when an issue is downloaded, its status will change accordingly), IMP: Cleaned up some db-locking problems with the idirectory/importResults, FIX: A+X should now be found on the pull list, FIX: More fixes for series with ',' and '&' in search results, FIX: Removed some errant calls - possible fix for (#281) - downloading already-downloaded files

evilhero 2013-04-25 23:45:02 -04:00
parent 659a960b01
commit 2f31266866
9 changed files with 93 additions and 49 deletions

View File

@@ -65,6 +65,7 @@ class DBConnection:
except sqlite3.OperationalError, e:
if "unable to open database file" in e.message or "database is locked" in e.message:
logger.warn('Database Error: %s' % e)
logger.warn('sqlresult: %s' % query)
attempt += 1
time.sleep(1)
else:
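The hunk above adds the offending query to the warning that DBConnection logs while retrying a locked database. A minimal standalone sketch of that retry-on-lock pattern, using plain sqlite3 and print in place of Mylar's logger (safe_action is an illustrative name, not Mylar's API):

import sqlite3
import time

def safe_action(connection, query, args=(), attempts=5):
    # Retry when SQLite reports the database is locked/unopenable, logging the
    # failing statement each time, as the warning added above now does.
    for attempt in range(attempts):
        try:
            with connection:
                return connection.execute(query, args)
        except sqlite3.OperationalError as e:
            if "database is locked" in str(e) or "unable to open database file" in str(e):
                print("Database Error: %s" % e)
                print("sqlresult: %s" % query)
                time.sleep(1)
            else:
                raise
    raise sqlite3.OperationalError("gave up after %d attempts: %s" % (attempts, query))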

View File

@@ -224,8 +224,8 @@ def decimal_issue(iss):
def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=None):
from mylar import db, logger
myDB = db.DBConnection()
#print ("comicid: " + str(comicid))
#print ("issue#: " + str(issue))
print ("comicid: " + str(comicid))
print ("issue#: " + str(issue))
# the issue here is a non-decimalized version, we need to see if it's got a decimal and if not, add '.00'
# iss_find = issue.find('.')
# if iss_find < 0:
@@ -247,8 +247,14 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=None):
if issueid is None:
chkissue = myDB.action("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
if chkissue is None:
logger.error("Invalid Issue_Number - please validate.")
return
#rechk chkissue against int value of issue #
chkissue = myDB.action("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, int(issue)]).fetchone()
if chkissue is None:
logger.error("Invalid Issue_Number - please validate.")
return
else:
logger.info("Int Issue_number compare found. continuing...")
issueid = chkissue['IssueID']
else:
issueid = chkissue['IssueID']
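The second hunk above makes rename_param retry the issue lookup with the integer form of the issue number before giving up. A rough condensation of that two-pass lookup (find_issueid is a hypothetical helper; db and logger are Mylar's own modules, imported the same way as in the hunk):

from mylar import db, logger

def find_issueid(comicid, issue):
    # Try the issue number exactly as given first (e.g. '5.00'); if that
    # misses, retry with the integer form (e.g. 5) before calling it invalid.
    myDB = db.DBConnection()
    chkissue = myDB.action("SELECT * from issues WHERE ComicID=? AND Issue_Number=?",
                           [comicid, issue]).fetchone()
    if chkissue is None:
        chkissue = myDB.action("SELECT * from issues WHERE ComicID=? AND Issue_Number=?",
                               [comicid, int(issue)]).fetchone()
        if chkissue is None:
            logger.error("Invalid Issue_Number - please validate.")
            return None
        logger.info("Int Issue_number compare found. continuing...")
    return chkissue['IssueID']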

View File

@@ -622,11 +622,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
logger.info(u"Attempting to grab wanted issues for : " + comic['ComicName'])
for result in results:
foundNZB = "none"
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
foundNZB = search.searchforissue(result['IssueID'])
if foundNZB == "yes":
updater.foundsearch(result['ComicID'], result['IssueID'])
search.searchforissue(result['IssueID'])
else: logger.info(u"No issues marked as wanted for " + comic['ComicName'])
logger.info(u"Finished grabbing what I could.")

View File

@@ -148,6 +148,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None)
cnt = 0
yearmatch = "false"
foundonwatch = "False"
issue = 999999
while (cnt < lenm):
if m[cnt] is None: break
@@ -212,7 +213,11 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None)
#assume no year given in filename...
result_comyear = "0000"
print ("cm?: " + str(cn))
comiss = issue
if issue is not '999999':
comiss = issue
else:
logger.ERROR("Invalid Issue number (none present) for " + comfilename)
break
cnsplit = cn.split()
cname = ''
findcn = 0
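The hunk above seeds issue with a 999999 sentinel and skips files whose names produced no issue number. Note the committed guard compares with "is not '999999'" (an identity test against a string, while the sentinel is an int); a sketch of the same guard spelled as an explicit comparison, with illustrative names and the conventional lower-case logger.error:

from mylar import logger

NO_ISSUE = 999999  # sentinel: no issue number could be parsed from the filename

def resolve_issue(issue, comfilename):
    # Refuse to import a file whose name yielded no issue number.
    if issue != NO_ISSUE:
        return issue
    logger.error("Invalid Issue number (none present) for " + comfilename)
    return None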

View File

@@ -33,13 +33,13 @@ def movefiles(comicid,comlocation,ogcname,imported=None):
shutil.move(srcimp, dstimp)
except (OSError, IOError):
logger.error("Failed to move files - check directories and manually re-run.")
#print("files moved.")
print("all files moved.")
#now that it's moved / renamed ... we remove it from importResults or mark as completed.
# results = myDB.action("SELECT * FROM importresults WHERE ComicName=?", [ogcname])
# if results is None: pass
# else:
# for result in results:
controlValue = {"impID": impr['impid']}
results = myDB.action("SELECT * from importresults WHERE ComicName=?", [ogcname])
if results is not None:
for result in results:
controlValue = {"impID": result['impid']}
newValue = {"Status": "Imported" }
myDB.upsert("importresults", newValue, controlValue)
return
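The hunk above replaces the hard-coded impr['impid'] control value with a loop over every matching importresults row, so each row gets flagged once its files have been moved. A condensed sketch (mark_imported is an illustrative name; db is Mylar's module as used in moveit.py):

from mylar import db

def mark_imported(ogcname):
    # Flag every importresults row for this series as Imported, keying the
    # upsert on each row's own impID.
    myDB = db.DBConnection()
    results = myDB.action("SELECT * from importresults WHERE ComicName=?", [ogcname])
    if results is None:
        return
    for result in results:
        controlValue = {"impID": result['impid']}
        newValue = {"Status": "Imported"}
        myDB.upsert("importresults", newValue, controlValue)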

View File

@@ -314,7 +314,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, nzbprov, nzbpr, Is
#cm = re.sub("\&", "%26", str(cm1))
cm = re.sub("\\band\\b", "", str(cm1)) # remove 'and' & '&' from the search pattern entirely (broader results, will filter out later)
cm = re.sub("\\bthe\\b", "", cm.lower()) # remove 'the' from the search pattern to accomodate naming differences
cm = re.sub("[\&\:\?]", "", str(cm))
cm = re.sub("[\&\:\?\,\-]", "", str(cm))
#print (cmi)
if '.' in findcomiciss[findcount]:
if len(str(isschk_b4dec)) == 3:
@@ -956,11 +956,12 @@ def searchforissue(issueid=None, new=False):
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.USE_SABNZBD or mylar.USE_NZBGET):
foundNZB = search_init(result['ComicName'], result['Issue_Number'], str(IssueYear), comic['ComicYear'], IssueDate, result['IssueID'], AlternateSearch, UseFuzzy, ComicVersion)
if foundNZB == "yes":
#print ("found!")
logger.fdebug("I found " + result['ComicName'] + ' #:' + str(result['Issue_Number']))
updater.foundsearch(ComicID=result['ComicID'], IssueID=result['IssueID'])
else:
pass
#print ("not found!")
return
def searchIssueIDList(issuelist):
myDB = db.DBConnection()
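The first hunk above widens the character class so ',' and '-' are also stripped from the provider search string, which is what the commit message's "series with ',' and '&'" fix refers to. A small standalone sketch of that cleanup (clean_searchname is an illustrative name, not Mylar's):

import re

def clean_searchname(name):
    # Broaden the search pattern the way NZB_SEARCH now does: drop 'and' and
    # 'the', then strip & : ? plus the newly added ',' and '-'.
    cm = re.sub(r"\band\b", "", name)
    cm = re.sub(r"\bthe\b", "", cm.lower())
    return re.sub(r"[&:?,\-]", "", cm)

For example, clean_searchname("Uncanny X-Men") comes out as "uncanny xmen", which matches more provider listings; the broader results are filtered back down later, as the hunk's comment notes.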

View File

@@ -156,6 +156,8 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
elif issuechk['Status'] == "Downloaded":
values = { "Status": "Downloaded"}
newValue['Status'] = "Downloaded"
#if the status is Downloaded and it's on the pullist - let's mark it so everyone can bask in the glory
elif issuechk['Status'] == "Wanted":
values = { "Status": "Wanted"}
newValue['Status'] = "Wanted"
@@ -177,24 +179,35 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
logger.fdebug("...Status already Wanted .. not changing.")
else:
logger.fdebug("...Already have issue - keeping existing status of : " + issuechk['Status'])
if issuechk is None:
myDB.upsert("upcoming", newValue, controlValue)
else:
myDB.upsert("issues", values, control)
if issuechk['Status'] == 'Downloaded':
logger.fdebug("updating Pull-list to reflect status.")
return issuechk['Status']
else:
logger.fdebug("Issues don't match for some reason...weekly new issue: " + str(IssueNumber))
def weekly_update(ComicName):
def weekly_update(ComicName,IssueNumber,CStatus):
# here we update status of weekly table...
# added Issue to stop false hits on series' that have multiple releases in a week
# added CStatus to update status flags on Pullist screen
myDB = db.DBConnection()
controlValue = { "COMIC": str(ComicName)}
if mylar.AUTOWANT_UPCOMING:
newValue = {"STATUS": "Wanted"}
else:
newValue = {"STATUS": "Skipped"}
myDB.upsert("weekly", newValue, controlValue)
issuecheck = myDB.action("SELECT * FROM weekly WHERE COMIC=? AND ISSUE=?", [ComicName,IssueNumber]).fetchone()
if issuecheck is not None:
controlValue = { "COMIC": str(ComicName),
"ISSUE": str(IssueNumber)}
if CStatus:
newValue = {"STATUS": CStatus}
else:
if mylar.AUTOWANT_UPCOMING:
newValue = {"STATUS": "Wanted"}
else:
newValue = {"STATUS": "Skipped"}
myDB.upsert("weekly", newValue, controlValue)
def newpullcheck(ComicName, ComicID):
# When adding a new comic, let's check for new issues on this week's pullist and update.
@@ -239,12 +252,6 @@ def foundsearch(ComicID, IssueID):
issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
CYear = issue['IssueDate'][:4]
# fc = filechecker.listFiles(comic['ComicLocation'], comic['ComicName'])
# HaveDict = {"ComicID": ComicID}
# newHave = { "Have": fc['comiccount'] }
# myDB.upsert("comics", newHave, HaveDict)
# #---
issue = myDB.action('SELECT * FROM issues WHERE IssueID=? AND ComicID=?', [IssueID, ComicID]).fetchone()
# update the status to Snatched (so it won't keep on re-downloading!)
logger.fdebug("updating status to snatched")
controlValue = {"IssueID": IssueID}

View File

@@ -927,6 +927,7 @@ class WebInterface(object):
# if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
GCDissue, whocares = helpers.decimal_issue(arc['IssueNumber'])
GCDissue = int(GCDissue) / 1000
if '.' not in str(GCDissue): GCDissue = str(GCDissue) + ".00"
logger.fdebug("issue converted to " + str(GCDissue))
isschk = myDB.action("SELECT * FROM issues WHERE ComicName=? AND Issue_Number=?", [comic['ComicName'], str(GCDissue)]).fetchone()
else:
@@ -992,6 +993,14 @@ class WebInterface(object):
ArcWatchlist.exposed = True
def ReadGetWanted(self, StoryArcID):
# this will queue up (ie. make 'Wanted') issues in a given Story Arc that are 'Not Watched'
myDB = db.DBConnection()
wantedlist = myDB.select("SELECT * FROM readlist WHERE StoryArcID=? AND Status='Not Watched'", [StoryArcID])
if wantedlist is not None:
for want in wantedlist:
self.queueissue(mode='readinglist', ComicName=want['ComicName'], ComicID=None, ComicYear=want['ComicYear'], ComicIssue=want['Issue_Number'], IssueID=None, SeriesYear=want['SeriesYear'])
def ReadMassCopy(self, StoryArcID, StoryArcName):
#this copies entire story arcs into the /cache/<storyarc> folder
#alternatively, it will copy the issues individually directly to a 3rd party device (ie.tablet)
@@ -1207,6 +1216,10 @@ class WebInterface(object):
minISSUE = 0
startISSUE = 10000000
comicstoIMP = []
movealreadyonlist = "no"
movedata = []
for result in results:
if result is None:
break
@@ -1225,15 +1238,17 @@ class WebInterface(object):
if mylar.IMP_MOVE:
logger.info("Mass import - Move files")
comloc = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
mylar.moveit.movefiles(comicid,comloc['ComicLocation'],ComicName)
#check for existing files...
updater.forceRescan(comicid)
movedata_comicid = comicid
movedata_comiclocation = comloc['ComicLocation']
movedata_comicname = ComicName
movealreadyonlist = "yes"
#mylar.moveit.movefiles(comicid,comloc['ComicLocation'],ComicName)
#check for existing files... (this is already called after move files in importer)
#updater.forceRescan(comicid)
else:
print ("nothing to do if I'm not moving.")
#hit the archiver in movefiles here...
raise cherrypy.HTTPRedirect("importResults")
raise cherrypy.HTTPRedirect("importResults")
else:
comicstoIMP.append(result['ComicLocation'].decode(mylar.SYS_ENCODING, 'replace'))
getiss = result['impID'].rfind('-')
@@ -1250,6 +1265,15 @@ class WebInterface(object):
if int(getiss) < int(startISSUE):
print ("issue now set to : " + str(getiss) + " ... it was : " + str(startISSUE))
startISSUE = str(getiss)
#taking this outside of the transaction in an attempt to stop db locking.
if mylar.IMP_MOVE and movealreadyonlist == "yes":
# for md in movedata:
mylar.moveit.movefiles(movedata_comicid, movedata_comiclocation, movedata_comicname)
updater.forceRescan(comicid)
raise cherrypy.HTTPRedirect("importResults")
#figure out # of issues and the year range allowable
if yearTOP > 0:
maxyear = int(yearTOP) - (int(minISSUE) / 12)
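The importResults hunks above defer the actual file move: instead of calling moveit.movefiles and forceRescan inside the loop over importresults rows, the target is remembered in the movedata_* variables and the move, rescan and redirect happen after the loop, which is the db-locking cleanup noted in the commit message. A rough sketch of that ordering (the callables and field names are stand-ins for the comics-table lookup, mylar.moveit.movefiles and updater.forceRescan, not Mylar's exact code):

def mass_import(results, move_enabled, lookup_location, move_files, force_rescan):
    # Remember what to move while iterating the rows, then do the slow file
    # I/O only after the loop so it never runs inside the db transaction.
    movedata = None
    for result in results:
        if result is None:
            break
        comicid = result['ComicID']
        if move_enabled:
            movedata = (comicid, lookup_location(comicid), result['ComicName'])
    if move_enabled and movedata is not None:
        comicid, location, name = movedata
        move_files(comicid, location, name)
        force_rescan(comicid)

The committed code keeps a single set of movedata_* variables (with the list loop left commented out), so only the last matched series is moved per pass; a list of pending moves would generalize it.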

View File

@@ -412,12 +412,12 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
while (cnt > -1):
lines[cnt] = lines[cnt].upper()
#llen[cnt] = str(llen[cnt])
#logger.fdebug("looking for : " + str(lines[cnt]))
logger.fdebug("looking for : " + str(lines[cnt]))
sqlsearch = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', ' ', lines[cnt])
sqlsearch = re.sub(r'\s', '%', sqlsearch)
if 'THE' in sqlsearch: sqlsearch = re.sub('THE', '', sqlsearch)
if '+' in sqlsearch: sqlsearch = re.sub('\+', '%PLUS%', sqlsearch)
#logger.fdebug("searchsql: " + str(sqlsearch))
logger.fdebug("searchsql: " + str(sqlsearch))
weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
#cur.execute('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [lines[cnt]])
for week in weekly:
@@ -425,7 +425,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
break
for nono in not_t:
if nono in week['PUBLISHER']:
#logger.fdebug("nono present")
logger.fdebug("nono present")
break
if nono in week['ISSUE']:
#logger.fdebug("graphic novel/tradeback detected..ignoring.")
@@ -442,8 +442,8 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
#-NEW-
# strip out all special characters and compare
watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', '', unlines[cnt])
comicnm = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\+\'\?\@]', '', comicnm)
watchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', '', unlines[cnt])
comicnm = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\&\'\?\@]', '', comicnm)
watchcomic = re.sub(r'\s', '', watchcomic)
comicnm = re.sub(r'\s', '', comicnm)
#logger.fdebug("Revised_Watch: " + watchcomic)
@@ -456,10 +456,14 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
modcomicnm = comicnm
#thnx to A+X for this...
if '+' in watchcomic:
if 'plus' in comicnm.lower():
modcomicnm = re.sub('plus', '+', comicnm)
logger.fdebug("+ detected...adjusting.")
logger.fdebug("comicnm:" + comicnm)
logger.fdebug("watchcomic:" + watchcomic)
modwatchcomic = re.sub('\+', 'PLUS', modwatchcomic)
logger.fdebug("modcomicnm:" + modcomicnm)
logger.fdebug("modwatchcomic:" + modwatchcomic)
if comicnm == watchcomic.upper() or modcomicnm == modwatchcomic.upper():
#logger.fdebug("matched on:" + str(comicnm) + "..." + str(watchcomic).upper())
logger.fdebug("matched on:" + str(comicnm) + "..." + str(watchcomic).upper())
pass
elif ("ANNUAL" in week['EXTRA']):
pass
@@ -493,9 +497,9 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
# here we add to comics.latest
updater.latest_update(ComicID=ComicID, LatestIssue=ComicIssue, LatestDate=ComicDate)
# here we add to upcoming table...
updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
# here we update status of weekly table...
updater.weekly_update(ComicName=week['COMIC'])
updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=statusupdate)
break
break
break
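The final hunks strip the same punctuation from both the watchlist name and the pull-list name (keeping '+' out of the strip set) and map 'plus' back to '+', which is what lets A+X get picked up off the pull list. A condensed sketch of that comparison (names_match is an illustrative helper, not the committed in-line code):

import re

def names_match(watchcomic, comicnm):
    # Strip punctuation and whitespace from both names ('+' is kept), then
    # treat '+' and 'PLUS' as equivalent before comparing.
    strip = r"[_#,/:;.\-!$%&'?@]"
    watch = re.sub(r"\s", "", re.sub(strip, "", watchcomic)).upper()
    name = re.sub(r"\s", "", re.sub(strip, "", comicnm)).upper()
    if "+" in watch:
        name = re.sub("PLUS", "+", name)
    return name == watch

For example, names_match("A+X", "A PLUS X") returns True.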