FIX: Columns not available in SQL when importing (#222), FIX: NZBGet wouldn't be sent to when doing multiple-issue selections, IMP: Added a warning error message when a series errors out because it was not detected as a multi-volume series (custom_exceptions.csv needed).

This commit is contained in:
evilhero 2013-02-22 03:35:51 -05:00
parent 9c6a750196
commit 951603643d
3 changed files with 49 additions and 15 deletions

View File

@@ -738,6 +738,8 @@ def dbcheck():
c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text)')
# c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS importresults (impID TEXT, ComicName TEXT, ComicYear TEXT, Status TEXT, ImportDate TEXT, ComicFilename TEXT, ComicLocation TEXT, WatchMatch TEXT)')
# c.execute('CREATE TABLE IF NOT EXISTS readlist (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Status TEXT, DateAdded TEXT)')
conn.commit()
c.close()
#new
@@ -789,9 +791,34 @@ def dbcheck():
c.execute('ALTER TABLE importresults ADD COLUMN WatchMatch TEXT')
try:
c.execute('SELECT inCacheDIR from issues')
c.execute('SELECT IssueCount from importresults')
except sqlite3.OperationalError:
c.execute('ALTER TABLE issues ADD COLUMN inCacheDIR TEXT')
c.execute('ALTER TABLE importresults ADD COLUMN IssueCount TEXT')
try:
c.execute('SELECT ComicLocation from importresults')
except sqlite3.OperationalError:
c.execute('ALTER TABLE importresults ADD COLUMN ComicLocation TEXT')
try:
c.execute('SELECT ComicFilename from importresults')
except sqlite3.OperationalError:
c.execute('ALTER TABLE importresults ADD COLUMN ComicFilename TEXT')
try:
c.execute('SELECT impID from importresults')
except sqlite3.OperationalError:
c.execute('ALTER TABLE importresults ADD COLUMN impID TEXT')
# try:
# c.execute('SELECT inCacheDIR from readlist')
# except sqlite3.OperationalError:
# c.execute('ALTER TABLE readlist ADD COLUMN inCacheDIR TEXT')
# try:
# c.execute('SELECT Location from readlist')
# except sqlite3.OperationalError:
# c.execute('ALTER TABLE readlist ADD COLUMN Location TEXT')
#if it's prior to Wednesday, the issue counts will be inflated by one as the online db's everywhere
#prepare for the next 'new' release of a series. It's caught in updater.py, so let's just store the
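
The dbcheck() additions above follow a probe-then-alter pattern: SELECT the column and, when sqlite3 raises OperationalError because it does not exist yet, add it with ALTER TABLE. A minimal, self-contained sketch of that pattern (the table/column names are placeholders, not Mylar's schema):

import sqlite3

def ensure_column(conn, table, column, coltype='TEXT'):
    c = conn.cursor()
    try:
        # probe for the column; databases created before it existed will raise
        c.execute('SELECT %s FROM %s' % (column, table))
    except sqlite3.OperationalError:
        c.execute('ALTER TABLE %s ADD COLUMN %s %s' % (table, column, coltype))
    conn.commit()
    c.close()

Called as ensure_column(conn, 'importresults', 'IssueCount'), an already-migrated database is left untouched, which is what keeps the import columns from going missing again (#222).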

View File

@@ -21,6 +21,8 @@ import datetime
import re
import urllib
import shutil
import sqlite3
import cherrypy
import mylar
from mylar import logger, helpers, db, mb, albumart, cv, parseit, filechecker, search, updater, moveit
@@ -87,6 +89,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
# gcd will return issue details (most importantly publishing date)
if mismatch == "no" or mismatch is None:
gcdinfo=parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid)
#print ("gcdinfo: " + str(gcdinfo))
mismatch_com = "no"
if gcdinfo == "No Match":
updater.no_searchresults(comicid)
@@ -275,7 +278,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
except IndexError:
#account for gcd variation here
if gcdinfo['gcdvariation'] == 'gcd':
#print ("gcd-variation accounted for.")
#logger.fdebug("gcd-variation accounted for.")
issdate = '0000-00-00'
int_issnum = int ( issis / 1000 )
break
@@ -285,14 +288,14 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
updater.no_searchresults(comicid)
return
elif '.' in str(gcdval['GCDIssue']):
#print ("g-issue:" + str(gcdval['GCDIssue']))
#logger.fdebug("g-issue:" + str(gcdval['GCDIssue']))
issst = str(gcdval['GCDIssue']).find('.')
#print ("issst:" + str(issst))
#logger.fdebug("issst:" + str(issst))
issb4dec = str(gcdval['GCDIssue'])[:issst]
#print ("issb4dec:" + str(issb4dec))
#logger.fdebug("issb4dec:" + str(issb4dec))
#if the length of decimal is only 1 digit, assume it's a tenth
decis = str(gcdval['GCDIssue'])[issst+1:]
#print ("decis:" + str(decis))
#logger.fdebug("decis:" + str(decis))
if len(decis) == 1:
decisval = int(decis) * 10
issaftdec = str(decisval)
@@ -300,7 +303,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
decisval = int(decis)
issaftdec = str(decisval)
gcd_issue = issb4dec + "." + issaftdec
#print ("gcd_issue:" + str(gcd_issue))
#logger.fdebug("gcd_issue:" + str(gcd_issue))
gcdis = (int(issb4dec) * 1000) + decisval
else:
gcdis = int(str(gcdval['GCDIssue'])) * 1000
@@ -342,10 +345,14 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
if iss_exists:
#print ("Existing status : " + str(iss_exists['Status']))
newValueDict['Status'] = iss_exists['Status']
#logger.fdebug("newValueDict:" + str(newValueDict))
myDB.upsert("issues", newValueDict, controlValueDict)
try:
myDB.upsert("issues", newValueDict, controlValueDict)
except sqlite3.InterfaceError, e:
#raise sqlite3.InterfaceError(e)
logger.error("MAJOR error trying to get issue data, this is most likely a MULTI-VOLUME series and you need to use the custom_exceptions.csv file.")
myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
return
n+=1
# logger.debug(u"Updating comic cache for " + comic['ComicName'])
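
The gcdis arithmetic in the hunks above encodes issue numbers as integers: the whole number is multiplied by 1000 and a single decimal digit is multiplied by 10 before being added on, so comparisons stay numeric. A condensed sketch mirroring that logic (not Mylar's actual helper, just the same math pulled into one function):

def encode_gcd_issue(gcd_issue):
    # '4' -> 4000, '4.5' -> 4050, '4.25' -> 4025
    issue = str(gcd_issue)
    if '.' not in issue:
        return int(issue) * 1000
    issb4dec, decis = issue.split('.', 1)
    decisval = int(decis) * 10 if len(decis) == 1 else int(decis)
    return int(issb4dec) * 1000 + decisval

When the resulting issue data can't be bound by sqlite, the new try/except around myDB.upsert() catches sqlite3.InterfaceError, deletes the partially-added comic and logs the custom_exceptions.csv hint instead of crashing the add.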

View File

@@ -707,7 +707,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, nzbprov, nzbpr, Is
tmpapi = "https://"
nzbget_host = mylar.NZBGET_HOST[8:]
tmpapi = tmpapi + str(mylar.NZBGET_USERNAME) + ":" + str(mylar.NZBGET_PASSWORD)
tmpapi = tmpapi + "@" + nzbget_host + ":" + str(mylar.NZBGET_PORT) + "/xmlrpc"
tmpapi = tmpapi + "@" + nzbget_host + ":" + str(mylar.NZBGET_PORT) + "/xmlrpc"
server = ServerProxy(tmpapi)
send_to_nzbget = server.appendurl(nzbname, mylar.NZBGET_CATEGORY, mylar.NZBGET_PRIORITY, True, str(linkapi))
if send_to_nzbget is True:
@@ -810,7 +810,7 @@ def searchforissue(issueid=None, new=False):
else:
ComicYear = str(result['IssueDate'])[:4]
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.USE_SABNZBD or mylar.USE_NZBGET):
foundNZB = search_init(result['ComicName'], result['Issue_Number'], str(ComicYear), comic['ComicYear'], IssueDate, result['IssueID'], AlternateSearch, UseFuzzy)
if foundNZB == "yes":
#print ("found!")
@@ -832,7 +832,7 @@ def searchforissue(issueid=None, new=False):
IssueYear = str(result['IssueDate'])[:4]
foundNZB = "none"
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.USE_SABNZBD or mylar.USE_NZBGET):
foundNZB = search_init(result['ComicName'], result['Issue_Number'], str(IssueYear), comic['ComicYear'], IssueDate, result['IssueID'], AlternateSearch, UseFuzzy)
if foundNZB == "yes":
#print ("found!")
@@ -855,7 +855,7 @@ def searchIssueIDList(issuelist):
ComicYear = comic['ComicYear']
else:
ComicYear = str(issue['IssueDate'])[:4]
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.USE_SABNZBD or mylar.USE_NZBGET):
foundNZB = search_init(comic['ComicName'], issue['Issue_Number'], str(ComicYear), comic['ComicYear'], issue['IssueDate'], issue['IssueID'], AlternateSearch, UseFuzzy)
if foundNZB == "yes":
#print ("found!")
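
For reference, the NZBGet hand-off exercised by the first search.py hunk reduces to one XML-RPC appendurl() call against the /xmlrpc endpoint. A hedged, self-contained sketch with placeholder host, credentials, category and NZB link (none of these values come from a real config; the parameter order simply follows the call in the diff):

from xmlrpclib import ServerProxy   # xmlrpc.client.ServerProxy on Python 3

nzbget_host = '192.168.1.50'        # placeholder, https:// prefix already stripped
tmpapi = 'https://' + 'nzbget' + ':' + 'changeme' + '@' + nzbget_host + ':' + '6789' + '/xmlrpc'
server = ServerProxy(tmpapi)
# appendurl(nzb filename, category, priority, add-to-top, download URL)
send_to_nzbget = server.appendurl('Some.Comic.001.nzb', 'comics', 0, True, 'http://example.com/getnzb/12345')
if send_to_nzbget is True:
    print('NZB successfully queued in NZBGet')

The later hunks only widen the gate from mylar.SAB_HOST to mylar.USE_SABNZBD or mylar.USE_NZBGET, so NZBGet-only setups reach this code path when searching single issues, new issues and issue lists.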