mirror of https://github.com/evilhero/mylar synced 2025-03-09 13:24:53 +00:00

IMP: (#735) Search now has 3 options available - 'loose', 'all', or 'explicit', FIX: weekly pull-list would error out due to excessive threads being generated, IMP: Removed unnecessary cursor statements in weeklypull, streamlined back to db module

This commit is contained in:
evilhero 2014-06-04 14:53:33 -04:00
parent cdd409016e
commit f53905262f
6 changed files with 91 additions and 66 deletions
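The three search modes introduced by this commit map onto how mb.findComic() builds the ComicVine query, as the mb.py hunks below show: 'loose' joins the search words with OR, while 'all' (the new default when no mode is given) and 'explicit' join them with AND, with 'explicit' additionally querying the volumes endpoint filtered on the exact name. A minimal sketch of that word-joining logic (the helper name is illustrative, not part of the commit):

```python
def build_comicquery(name, explicit=None):
    """Illustrative condensation of the mode handling added to mb.findComic()."""
    if explicit is None:
        explicit = 'all'                          # new default: match ALL of the terms
    if explicit == 'loose':
        comicquery = name.replace(" ", " OR ")    # match ANY of the terms (broad, slower)
    else:
        comicquery = name.replace(" ", " AND ")   # 'all' and 'explicit' both AND the terms
    return comicquery, explicit

print build_comicquery("detective comics")            # ('detective AND comics', 'all')
print build_comicquery("detective comics", 'loose')   # ('detective OR comics', 'loose')
```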

View file

@@ -491,7 +491,8 @@
 </tbody>
 </table>
 </form>
+%endif
+%if mylar.ANNUALS_ON:
 <div style="position:relative; width:960px; height:10px; margin:10px auto;">
 <form action="manual_annual_add" method="GET">
 <input type="hidden" name="comicid" value=${comic['ComicID']}>

View file

@@ -3,10 +3,15 @@
 <%def name="headerIncludes()">
 <div id="subhead_container">
 <div id="subhead_menu">
-%if explicit:
-<a id="menu_link_delete" title="This will search for any of the terms given : ${name}" href="searchit?name=${name |u}&explicit=True">Explicit Search</a>
-%else:
-<a id="menu_link_delete" title="This will search explicitly for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=True">Explicit Search</a>
+%if explicit == 'loose':
+<a id="menu_link_delete" title="This will search for ALL of the terms given : ${name}" href="searchit?name=${name |u}&explicit=loose">Search ALL terms</a>
+<a id="menu_link_delete" title="This will search EXPLICITLY for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=explicit">Explicit Search</a>
+%elif explicit == 'explicit':
+<a id="menu_link_delete" title="Warning: This will search for ANY of the terms given : ${name} (this could take awhile)" href="searchit?name=${name |u}&explicit=loose">Loose Search</a>
+<a id="menu_link_delete" title="This will search for ALL of the terms given : ${name}" href="searchit?name=${name |u}&explicit=all">Search ALL terms</a>
+%elif explicit == 'all':
+<a id="menu_link_delete" title="This will search EXPLICITLY for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=explicit">Explicit Search</a>
+<a id="menu_link_delete" title="Warning: This will search for ANY of the terms given : ${name} (this could take awhile)" href="searchit?name=${name |u}&explicit=loose">Loose Search</a>
 %endif
 </div>
 </div>
@@ -14,7 +19,9 @@
 <%def name="body()">
 <div id="paddingheader">
 <%
-if explicit:
+if explicit == 'loose':
+searchtext = "Loose Search results for: </br><center> " + name + "</center>"
+elif explicit == 'explicit':
 searchtext = "Explicit Search results for: </br><center> " + name + "</center>"
 else:
 searchtext = "Search results for : </br><center>" + name + "</center>"

View file

@@ -193,37 +193,37 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
 #else:
 #sresults = mb.findComic(annComicName, mode, issue=annissues, limityear=annualval['AnnualYear'])
 #print "annualyear: " + str(annualval['AnnualYear'])
-logger.fdebug('annualyear:' + str(annualyear))
-sresults = mb.findComic(annComicName, mode, issue=None)
+logger.fdebug('[IMPORTER-ANNUAL] - Annual Year:' + str(annualyear))
+sresults, explicit = mb.findComic(annComicName, mode, issue=None)#,explicit=True)
 type='comic'
 if len(sresults) == 1:
-logger.fdebug('1 result')
+logger.fdebug('[IMPORTER-ANNUAL] - 1 result')
 if len(sresults) > 0:
-logger.fdebug('there are ' + str(len(sresults)) + ' results.')
+logger.fdebug('[IMPORTER-ANNUAL] - there are ' + str(len(sresults)) + ' results.')
 num_res = 0
 while (num_res < len(sresults)):
 sr = sresults[num_res]
 logger.fdebug("description:" + sr['description'])
 if 'paperback' in sr['description'] or 'collecting' in sr['description'] or 'reprints' in sr['description'] or 'collected' in sr['description']:
-logger.fdebug('tradeback/collected edition detected - skipping ' + str(sr['comicid']))
+logger.fdebug('[IMPORTER-ANNUAL] - tradeback/collected edition detected - skipping ' + str(sr['comicid']))
 else:
 if comicid in sr['description']:
-logger.fdebug(str(comicid) + ' found. Assuming it is part of the greater collection.')
+logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
 issueid = sr['comicid']
-logger.fdebug(str(issueid) + ' added to series list as an Annual')
+logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' added to series list as an Annual')
 if issueid in annualids:
-logger.fdebug(str(issueid) + ' already exists & was refreshed.')
+logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' already exists & was refreshed.')
 num_res+=1 # need to manually increment since not a for-next loop
 continue
 issued = cv.getComic(issueid,'issue')
 if len(issued) is None or len(issued) == 0:
-logger.fdebug('Could not find any annual information...')
+logger.fdebug('[IMPORTER-ANNUAL] - Could not find any annual information...')
 pass
 else:
 n = 0
-logger.fdebug('there are ' + str(sr['issues']) + ' annuals in this series.')
+logger.fdebug('[IMPORTER-ANNUAL] - There are ' + str(sr['issues']) + ' annuals in this series.')
 while (n < int(sr['issues'])):
 try:
 firstval = issued['issuechoice'][n]
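The loop above is also where Mylar decides whether a ComicVine result should be treated as part of the series' annuals: trade paperbacks and collected editions are skipped outright, and anything whose description mentions the parent ComicID is assumed to belong to the greater collection. A condensed sketch of that check (the helper name is illustrative, not part of the commit):

```python
def looks_like_annual(sr, comicid):
    """Illustrative condensation of the checks performed in addComictoDB()."""
    desc = sr['description']
    # tradeback / collected editions are never treated as annuals
    if any(word in desc for word in ('paperback', 'collecting', 'reprints', 'collected')):
        return False
    # if the parent series' ComicVine ID is referenced in the description,
    # assume this volume is part of the greater collection (i.e. its annuals)
    return comicid in desc
```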
@@ -250,7 +250,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
 myDB.upsert("annuals", newVals, newCtrl)
 if issuechk is not None and issuetype == 'annual':
-logger.fdebug('comparing annual ' + str(issuechk) + ' .. to .. ' + str(int_issnum))
+logger.fdebug('[IMPORTER-ANNUAL] - Comparing annual ' + str(issuechk) + ' .. to .. ' + str(int_issnum))
 if issuechk == int_issnum:
 weeklyissue_check.append({"Int_IssueNumber": int_issnum,
 "Issue_Number": issnum,
@@ -261,20 +261,20 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
 num_res+=1
 elif len(sresults) == 0 or len(sresults) is None:
-logger.fdebug('no results, removing the year from the agenda and re-querying.')
-sresults = mb.findComic(annComicName, mode, issue=None)
+logger.fdebug('[IMPORTER-ANNUAL] - No results, removing the year from the agenda and re-querying.')
+sresults, explicit = mb.findComic(annComicName, mode, issue=None)#, explicit=True)
 if len(sresults) == 1:
 sr = sresults[0]
-logger.fdebug(str(comicid) + ' found. Assuming it is part of the greater collection.')
+logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
 else:
 resultset = 0
 else:
-logger.fdebug('returning results to screen - more than one possibility')
+logger.fdebug('[IMPORTER-ANNUAL] - Returning results to screen - more than one possibility')
 for sr in sresults:
 if annualyear < sr['comicyear']:
-logger.fdebug(str(annualyear) + ' is less than ' + str(sr['comicyear']))
+logger.fdebug('[IMPORTER-ANNUAL] - ' + str(annualyear) + ' is less than ' + str(sr['comicyear']))
 if int(sr['issues']) > (2013 - int(sr['comicyear'])):
-logger.fdebug('issue count is wrong')
+logger.fdebug('[IMPORTER-ANNUAL] - Issue count is wrong')
 #newCtrl = {"IssueID": issueid}
 #newVals = {"Issue_Number": annualval['AnnualIssue'],
@@ -1171,10 +1171,10 @@ def updateissuedata(comicid, comicname=None, issued=None, comicIssues=None, call
 logger.error(str(issnum) + ' this has an alpha-numeric in the issue # which I cannot account for.')
 return
 #get the latest issue / date using the date.
-logger.fdebug('issue : ' + str(issnum))
-logger.fdebug('latest date: ' + str(latestdate))
-logger.fdebug('first date: ' + str(firstdate))
-logger.fdebug('issue date: ' + str(firstval['Issue_Date']))
+#logger.fdebug('issue : ' + str(issnum))
+#logger.fdebug('latest date: ' + str(latestdate))
+#logger.fdebug('first date: ' + str(firstdate))
+#logger.fdebug('issue date: ' + str(firstval['Issue_Date']))
 if firstval['Issue_Date'] > latestdate:
 if issnum > latestiss:
 latestiss = issnum

View file

@@ -31,14 +31,11 @@ def pullsearch(comicapi,comicquery,offset,explicit):
 u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
 u_comicquery = u_comicquery.replace(" ", "%20")
-# as of 02/15/2014 this is buggered up.
-#FALSE
-if explicit == False:
+if explicit == 'all' or explicit == 'loose':
 PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset)
 else:
-#TRUE
-# 02/22/2014 use the volume filter label to get the right results.
+# 02/22/2014 use the volume filter label to get the right results.
 PULLURL = mylar.CVURL + 'volumes?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page
 #all these imports are standard on most modern python implementations
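For reference, pullsearch() now picks between two ComicVine endpoints: the free-text search endpoint for 'all' and 'loose' queries (the AND/OR joining happens in the query string itself), and the volumes endpoint with a name filter for explicit queries. A sketch of that branch, with the URL pieces taken from the hunk above (the helper name and standalone form are illustrative):

```python
import urllib

FIELDS = 'id,name,start_year,site_detail_url,count_of_issues,image,publisher,description'

def build_pullurl(cvurl, comicapi, comicquery, offset, explicit):
    """Illustrative: mirrors the endpoint choice made in pullsearch()."""
    u_comicquery = urllib.quote(comicquery.encode('utf-8').strip()).replace(" ", "%20")
    if explicit == 'all' or explicit == 'loose':
        # generic search endpoint
        return (cvurl + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' +
                u_comicquery + '&field_list=' + FIELDS + '&format=xml&offset=' + str(offset))
    # explicit mode: filter volumes on the exact name
    return (cvurl + 'volumes?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery +
            '&field_list=' + FIELDS + '&format=xml&offset=' + str(offset))
```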
@@ -74,14 +71,22 @@ def findComic(name, mode, issue, limityear=None, explicit=None):
 #comicquery=name.replace(" ", "%20")
 if explicit is None:
-logger.fdebug('explicit is None. Setting to False.')
-explicit = False
+#logger.fdebug('explicit is None. Setting to Default mode of ALL search words.')
+comicquery=name.replace(" ", " AND ")
+explicit = 'all'
-if explicit:
-logger.fdebug('changing to explicit mode.')
+#OR
+if explicit == 'loose':
+logger.fdebug('Changing to loose mode - this will match ANY of the search words')
+comicquery = name.replace(" ", " OR ")
+elif explicit == 'explicit':
+logger.fdebug('Changing to explicit mode - this will match explicitly on the EXACT words')
 comicquery=name.replace(" ", " AND ")
 else:
-logger.fdebug('non-explicit mode.')
+logger.fdebug('Default search mode - this will match on ALL search words')
+comicquery = name.replace(" ", " AND ")
+explicit = 'all'
 comicapi='583939a3df0a25fc4e8b7a29934a13078002dc27'
 #let's find out how many results we get from the query...
@@ -158,4 +163,4 @@ def findComic(name, mode, issue, limityear=None, explicit=None):
 #search results are limited to 100 and by pagination now...let's account for this.
 countResults = countResults + 100
-return comiclist
+return comiclist, explicit
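Because findComic() now returns a (comiclist, explicit) tuple instead of just the list, every caller in importer.py and webserve.py is updated below to unpack both values and to pass the mode through to searchresults.html. A hedged usage sketch (the search term is illustrative; the result fields are the ones referenced elsewhere in this diff):

```python
# callers must unpack the tuple; assigning the raw return value would leave
# searchresults holding (list, mode) instead of the result list itself
searchresults, explicit = mb.findComic('fantastic four', mode='series', issue=None, explicit='all')
for sr in searchresults:
    print sr['name'], sr['comicyear'], sr['issues']
# 'explicit' is then handed back to the searchresults.html template so the
# mode-switch links at the top of the page know which mode produced the results
```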

View file

@@ -174,7 +174,7 @@ class WebInterface(object):
 if len(name) == 0:
 raise cherrypy.HTTPRedirect("home")
 if type == 'comic' and mode == 'pullseries':
-searchresults = mb.findComic(name, mode, issue=issue)
+searchresults, explicit = mb.findComic(name, mode, issue=issue)
 elif type == 'comic' and mode == 'series':
 if name.startswith('4050-'):
 mismatch = "no"
@@ -182,11 +182,11 @@ class WebInterface(object):
 logger.info('Attempting to add directly by ComicVineID: ' + str(comicid) + '. I sure hope you know what you are doing.')
 threading.Thread(target=importer.addComictoDB, args=[comicid,mismatch,None]).start()
 raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
-searchresults = mb.findComic(name, mode, issue=None, explicit=explicit)
+searchresults, explicit = mb.findComic(name, mode, issue=None, explicit=explicit)
 elif type == 'comic' and mode == 'want':
-searchresults = mb.findComic(name, mode, issue)
+searchresults, explicit = mb.findComic(name, mode, issue)
 elif type == 'storyarc':
-searchresults = mb.findComic(name, mode, issue=None, storyarc='yes')
+searchresults, explicit = mb.findComic(name, mode, issue=None, storyarc='yes')
 searchresults = sorted(searchresults, key=itemgetter('comicyear','issues'), reverse=True)
 #print ("Results: " + str(searchresults))
@@ -1021,7 +1021,7 @@ class WebInterface(object):
 #limittheyear.append(cf['IssueDate'][-4:])
 for ser in cflist:
 logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(ser['IssueDate'][-4:]) + ')')
-searchresults = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=ser['IssueDate'][-4:])
+searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=ser['IssueDate'][-4:], explicit='all')
 print searchresults
 if len(searchresults) > 1:
 logger.info('More than one result returned - this may have to be a manual add')
@@ -1076,7 +1076,7 @@ class WebInterface(object):
 mylar.dbcheck()
 logger.info("Deleted existed pull-list data. Recreating Pull-list...")
 forcecheck = 'yes'
-threading.Thread(target=weeklypull.pullit, args=[forcecheck]).start()
+weeklypull.pullit(forcecheck)
 raise cherrypy.HTTPRedirect("pullist")
 pullrecreate.exposed = True
@@ -1954,10 +1954,10 @@ class WebInterface(object):
 def confirmResult(self,comicname,comicid):
 #print ("here.")
 mode='series'
-sresults = mb.findComic(comicname, mode, None)
+sresults, explicit = mb.findComic(comicname, mode, None, explicit='all')
 #print sresults
 type='comic'
-return serve_template(templatename="searchresults.html", title='Import Results for: "' + comicname + '"',searchresults=sresults, type=type, imported='confirm', ogcname=comicid)
+return serve_template(templatename="searchresults.html", title='Import Results for: "' + comicname + '"',searchresults=sresults, type=type, imported='confirm', ogcname=comicid, explicit=explicit)
 confirmResult.exposed = True
 def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_rename=0, imp_metadata=0):
@@ -2175,9 +2175,9 @@ class WebInterface(object):
 mode='series'
 if yearRANGE is None:
-sresults = mb.findComic(displaycomic, mode, issue=numissues) #ComicName, mode, issue=numissues)
+sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
 else:
-sresults = mb.findComic(displaycomic, mode, issue=numissues, limityear=yearRANGE) #ComicName, mode, issue=numissues, limityear=yearRANGE)
+sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, limityear=yearRANGE, explicit='all') #ComicName, mode, issue=numissues, limityear=yearRANGE)
 type='comic'
 if len(sresults) == 1:
@@ -2187,7 +2187,7 @@ class WebInterface(object):
 # #need to move the files here.
 elif len(sresults) == 0 or len(sresults) is None:
 implog = implog + "no results, removing the year from the agenda and re-querying.\n"
-sresults = mb.findComic(displaycomic, mode, issue=numissues) #ComicName, mode, issue=numissues)
+sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
 if len(sresults) == 1:
 sr = sresults[0]
 implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
@@ -2209,7 +2209,7 @@ class WebInterface(object):
 cresults = self.addComic(comicid=sr['comicid'],comicname=sr['name'],comicyear=sr['comicyear'],comicpublisher=sr['publisher'],comicimage=sr['comicimage'],comicissues=sr['issues'],imported='yes',ogcname=ogcname) #imported=comicstoIMP,ogcname=ogcname)
 return serve_template(templatename="searchfix.html", title="Error Check", comicname=sr['name'], comicid=sr['comicid'], comicyear=sr['comicyear'], comicimage=sr['comicimage'], comicissues=sr['issues'], cresults=cresults, imported='yes', ogcname=str(ogcname))
 else:
-return serve_template(templatename="searchresults.html", title='Import Results for: "' + displaycomic + '"',searchresults=sresults, type=type, imported='yes', ogcname=ogcname) #imported=comicstoIMP, ogcname=ogcname)
+return serve_template(templatename="searchresults.html", title='Import Results for: "' + displaycomic + '"',searchresults=sresults, type=type, imported='yes', ogcname=ogcname, explicit=explicit) #imported=comicstoIMP, ogcname=ogcname)
 preSearchit.exposed = True
 def pretty_git(self, br_history):

View file

@@ -42,11 +42,9 @@ def pullit(forcecheck=None):
 else:
 pulldate = pull_date['SHIPDATE']
 except (sqlite3.OperationalError, TypeError),msg:
-conn=sqlite3.connect(mylar.DB_FILE)
-c=conn.cursor()
 logger.info(u"Error Retrieving weekly pull list - attempting to adjust")
-c.execute('DROP TABLE weekly')
-c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)')
+myDB.action("DROP TABLE weekly")
+myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)")
 pulldate = '00000000'
 logger.fdebug(u"Table re-created, trying to populate")
 else:
@@ -346,15 +344,17 @@ def pullit(forcecheck=None):
 logger.info(u"Populating the NEW Weekly Pull list into Mylar.")
 newtxtfile.close()
-mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
-connection = sqlite3.connect(str(mylardb))
-cursor = connection.cursor()
-cursor.executescript('drop table if exists weekly;')
-cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text);")
-connection.commit()
+#mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
+#connection = sqlite3.connect(str(mylardb))
+#cursor = connection.cursor()
+#cursor.execute('drop table if exists weekly;')
+myDB.action("drop table if exists weekly")
+myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)")
+#cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text);")
+#connection.commit()
 csvfile = open(newfl, "rb")
@@ -368,14 +368,23 @@ def pullit(forcecheck=None):
 #print (row)
 try:
 logger.debug("Row: %s" % row)
-cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?,null);", row)
+controlValueDict = {'COMIC': row[3],
+                    'ISSUE': row[2],
+                    'EXTRA': row[4] }
+newValueDict = {'SHIPDATE': row[0],
+                'PUBLISHER': row[1],
+                'STATUS': row[5],
+                'COMICID': None }
+myDB.upsert("weekly", newValueDict, controlValueDict)
+#cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?,null);", row)
 except Exception, e:
 #print ("Error - invald arguments...-skipping")
 pass
 t+=1
 csvfile.close()
-connection.commit()
-connection.close()
+#connection.commit()
+#connection.close()
 logger.info(u"Weekly Pull List successfully loaded.")
 #let's delete the files
 pullpath = str(mylar.CACHE_DIR) + "/"
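The per-row INSERT against a raw sqlite3 cursor is replaced with the db module's upsert(), which takes a dict of values to write and a dict of key columns that identify the row, so re-importing the pull-list can update existing entries instead of stacking duplicates. A minimal sketch of the call pattern, assuming the module's usual myDB = db.DBConnection() and a csv row laid out as in the hunk above:

```python
# row = [SHIPDATE, PUBLISHER, ISSUE, COMIC, EXTRA, STATUS]
controlValueDict = {'COMIC': row[3],    # columns that identify the row...
                    'ISSUE': row[2],
                    'EXTRA': row[4]}
newValueDict = {'SHIPDATE': row[0],     # ...and the columns written for it
                'PUBLISHER': row[1],
                'STATUS': row[5],
                'COMICID': None}
# inserts the row if the COMIC/ISSUE/EXTRA combination is new, updates it otherwise
myDB.upsert("weekly", newValueDict, controlValueDict)
```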
@@ -725,6 +734,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
 else:
 # here we add to upcoming table...
+statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck, futurepull='yes', altissuenumber=altissuenum)
 # here we update status of weekly table...
 if statusupdate is not None:
 cstatus = statusupdate['Status']
@@ -757,9 +767,11 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
 def check(fname, txt):
-with open(fname) as dataf:
-return any(txt in line for line in dataf)
+try:
+with open(fname) as dataf:
+return any(txt in line for line in dataf)
+except:
+return None
 def loaditup(comicname, comicid, issue, chktype):
 myDB = db.DBConnection()