Mirror of https://github.com/evilhero/mylar (synced 2025-03-09 13:24:53 +00:00)
IMP: (#735) Search now has 3 options available - 'loose', 'all', or 'explicit', FIX: weekly pull-list would error out due to excessive threads being generated, IMP: Removed unnecessary cursor statements in weeklypull, streamlined back to db module
This commit is contained in:
parent cdd409016e
commit f53905262f
6 changed files with 91 additions and 66 deletions
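The first item in the commit message is implemented in mylar/mb.py further down: 'loose' joins the search words with OR, while 'all' (the default) and 'explicit' join them with AND, and pullsearch() queries a different ComicVine resource for the explicit case. A minimal sketch of that mode-to-query mapping (illustrative only, not code from the commit):

def build_comicquery(name, explicit=None):
    # illustrative helper, not part of the commit: mirrors the findComic() logic below
    if explicit is None:
        explicit = 'all'                            # default mode matches ALL search words
    if explicit == 'loose':
        return name.replace(" ", " OR "), explicit  # loose mode matches ANY of the words
    # 'all' and 'explicit' both join the words with AND; they differ only in which
    # ComicVine resource pullsearch() hits (generic search vs. filter on volume name)
    return name.replace(" ", " AND "), explicit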
@@ -491,7 +491,8 @@
</tbody>
</table>
</form>

%endif
%if mylar.ANNUALS_ON:
<div style="position:relative; width:960px; height:10px; margin:10px auto;">
<form action="manual_annual_add" method="GET">
<input type="hidden" name="comicid" value=${comic['ComicID']}>
@@ -3,10 +3,15 @@
<%def name="headerIncludes()">
<div id="subhead_container">
<div id="subhead_menu">
%if explicit:
<a id="menu_link_delete" title="This will search for any of the terms given : ${name}" href="searchit?name=${name |u}&explicit=True">Explicit Search</a>
%else:
<a id="menu_link_delete" title="This will search explicitly for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=True">Explicit Search</a>
%if explicit == 'loose':
<a id="menu_link_delete" title="This will search for ALL of the terms given : ${name}" href="searchit?name=${name |u}&explicit=loose">Search ALL terms</a>
<a id="menu_link_delete" title="This will search EXPLICITLY for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=explicit">Explicit Search</a>
%elif explicit == 'explicit':
<a id="menu_link_delete" title="Warning: This will search for ANY of the terms given : ${name} (this could take awhile)" href="searchit?name=${name |u}&explicit=loose">Loose Search</a>
<a id="menu_link_delete" title="This will search for ALL of the terms given : ${name}" href="searchit?name=${name |u}&explicit=all">Search ALL terms</a>
%elif explicit == 'all':
<a id="menu_link_delete" title="This will search EXPLICITLY for only the terms given : ${name}" href="searchit?name=${name |u}&explicit=explicit">Explicit Search</a>
<a id="menu_link_delete" title="Warning: This will search for ANY of the terms given : ${name} (this could take awhile)" href="searchit?name=${name |u}&explicit=loose">Loose Search</a>
%endif
</div>
</div>
@@ -14,7 +19,9 @@
<%def name="body()">
<div id="paddingheader">
<%
if explicit:
if explicit == 'loose':
searchtext = "Loose Search results for: </br><center> " + name + "</center>"
elif explicit == 'explicit':
searchtext = "Explicit Search results for: </br><center> " + name + "</center>"
else:
searchtext = "Search results for : </br><center>" + name + "</center>"
@@ -193,37 +193,37 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
#else:
#sresults = mb.findComic(annComicName, mode, issue=annissues, limityear=annualval['AnnualYear'])
#print "annualyear: " + str(annualval['AnnualYear'])
logger.fdebug('annualyear:' + str(annualyear))
sresults = mb.findComic(annComicName, mode, issue=None)
logger.fdebug('[IMPORTER-ANNUAL] - Annual Year:' + str(annualyear))
sresults, explicit = mb.findComic(annComicName, mode, issue=None)#,explicit=True)

type='comic'

if len(sresults) == 1:
logger.fdebug('1 result')
logger.fdebug('[IMPORTER-ANNUAL] - 1 result')
if len(sresults) > 0:
logger.fdebug('there are ' + str(len(sresults)) + ' results.')
logger.fdebug('[IMPORTER-ANNUAL] - there are ' + str(len(sresults)) + ' results.')
num_res = 0
while (num_res < len(sresults)):
sr = sresults[num_res]
logger.fdebug("description:" + sr['description'])
if 'paperback' in sr['description'] or 'collecting' in sr['description'] or 'reprints' in sr['description'] or 'collected' in sr['description']:
logger.fdebug('tradeback/collected edition detected - skipping ' + str(sr['comicid']))
logger.fdebug('[IMPORTER-ANNUAL] - tradeback/collected edition detected - skipping ' + str(sr['comicid']))
else:
if comicid in sr['description']:
logger.fdebug(str(comicid) + ' found. Assuming it is part of the greater collection.')
logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
issueid = sr['comicid']
logger.fdebug(str(issueid) + ' added to series list as an Annual')
logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' added to series list as an Annual')
if issueid in annualids:
logger.fdebug(str(issueid) + ' already exists & was refreshed.')
logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' already exists & was refreshed.')
num_res+=1 # need to manually increment since not a for-next loop
continue
issued = cv.getComic(issueid,'issue')
if len(issued) is None or len(issued) == 0:
logger.fdebug('Could not find any annual information...')
logger.fdebug('[IMPORTER-ANNUAL] - Could not find any annual information...')
pass
else:
n = 0
logger.fdebug('there are ' + str(sr['issues']) + ' annuals in this series.')
logger.fdebug('[IMPORTER-ANNUAL] - There are ' + str(sr['issues']) + ' annuals in this series.')
while (n < int(sr['issues'])):
try:
firstval = issued['issuechoice'][n]
@@ -250,7 +250,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
myDB.upsert("annuals", newVals, newCtrl)

if issuechk is not None and issuetype == 'annual':
logger.fdebug('comparing annual ' + str(issuechk) + ' .. to .. ' + str(int_issnum))
logger.fdebug('[IMPORTER-ANNUAL] - Comparing annual ' + str(issuechk) + ' .. to .. ' + str(int_issnum))
if issuechk == int_issnum:
weeklyissue_check.append({"Int_IssueNumber": int_issnum,
"Issue_Number": issnum,
@@ -261,20 +261,20 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,c
num_res+=1

elif len(sresults) == 0 or len(sresults) is None:
logger.fdebug('no results, removing the year from the agenda and re-querying.')
sresults = mb.findComic(annComicName, mode, issue=None)
logger.fdebug('[IMPORTER-ANNUAL] - No results, removing the year from the agenda and re-querying.')
sresults, explicit = mb.findComic(annComicName, mode, issue=None)#, explicit=True)
if len(sresults) == 1:
sr = sresults[0]
logger.fdebug(str(comicid) + ' found. Assuming it is part of the greater collection.')
logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
else:
resultset = 0
else:
logger.fdebug('returning results to screen - more than one possibility')
logger.fdebug('[IMPORTER-ANNUAL] - Returning results to screen - more than one possibility')
for sr in sresults:
if annualyear < sr['comicyear']:
logger.fdebug(str(annualyear) + ' is less than ' + str(sr['comicyear']))
logger.fdebug('[IMPORTER-ANNUAL] - ' + str(annualyear) + ' is less than ' + str(sr['comicyear']))
if int(sr['issues']) > (2013 - int(sr['comicyear'])):
logger.fdebug('issue count is wrong')
logger.fdebug('[IMPORTER-ANNUAL] - Issue count is wrong')

#newCtrl = {"IssueID": issueid}
#newVals = {"Issue_Number": annualval['AnnualIssue'],
@@ -1171,10 +1171,10 @@ def updateissuedata(comicid, comicname=None, issued=None, comicIssues=None, call
logger.error(str(issnum) + ' this has an alpha-numeric in the issue # which I cannot account for.')
return
#get the latest issue / date using the date.
logger.fdebug('issue : ' + str(issnum))
logger.fdebug('latest date: ' + str(latestdate))
logger.fdebug('first date: ' + str(firstdate))
logger.fdebug('issue date: ' + str(firstval['Issue_Date']))
#logger.fdebug('issue : ' + str(issnum))
#logger.fdebug('latest date: ' + str(latestdate))
#logger.fdebug('first date: ' + str(firstdate))
#logger.fdebug('issue date: ' + str(firstval['Issue_Date']))
if firstval['Issue_Date'] > latestdate:
if issnum > latestiss:
latestiss = issnum
mylar/mb.py (25 changed lines)
@@ -31,13 +31,10 @@ def pullsearch(comicapi,comicquery,offset,explicit):
u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
u_comicquery = u_comicquery.replace(" ", "%20")

# as of 02/15/2014 this is buggered up.
#FALSE
if explicit == False:
if explicit == 'all' or explicit == 'loose':
PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset)

else:
#TRUE
# 02/22/2014 use the volume filter label to get the right results.
PULLURL = mylar.CVURL + 'volumes?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page
@@ -74,14 +71,22 @@ def findComic(name, mode, issue, limityear=None, explicit=None):
#comicquery=name.replace(" ", "%20")

if explicit is None:
logger.fdebug('explicit is None. Setting to False.')
explicit = False
#logger.fdebug('explicit is None. Setting to Default mode of ALL search words.')
comicquery=name.replace(" ", " AND ")
explicit = 'all'

if explicit:
logger.fdebug('changing to explicit mode.')
#OR
if explicit == 'loose':
logger.fdebug('Changing to loose mode - this will match ANY of the search words')
comicquery = name.replace(" ", " OR ")
elif explicit == 'explicit':
logger.fdebug('Changing to explicit mode - this will match explicitly on the EXACT words')
comicquery=name.replace(" ", " AND ")
else:
logger.fdebug('non-explicit mode.')
logger.fdebug('Default search mode - this will match on ALL search words')
comicquery = name.replace(" ", " AND ")
explicit = 'all'

comicapi='583939a3df0a25fc4e8b7a29934a13078002dc27'

#let's find out how many results we get from the query...
@@ -158,4 +163,4 @@ def findComic(name, mode, issue, limityear=None, explicit=None):
#search results are limited to 100 and by pagination now...let's account for this.
countResults = countResults + 100

return comiclist
return comiclist, explicit
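findComic() now returns the resolved search mode alongside the result list, and every caller in the web-interface hunks below is updated to unpack both values. A short usage sketch (the argument values are illustrative only, not taken from the commit):

from mylar import mb

# unpack the results plus the resolved mode; 'explicit' is passed through to
# searchresults.html so the header can offer links to the alternate search modes
searchresults, explicit = mb.findComic('detective comics', 'series', issue=None, explicit='loose')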
@@ -174,7 +174,7 @@ class WebInterface(object):
if len(name) == 0:
raise cherrypy.HTTPRedirect("home")
if type == 'comic' and mode == 'pullseries':
searchresults = mb.findComic(name, mode, issue=issue)
searchresults, explicit = mb.findComic(name, mode, issue=issue)
elif type == 'comic' and mode == 'series':
if name.startswith('4050-'):
mismatch = "no"
@@ -182,11 +182,11 @@ class WebInterface(object):
logger.info('Attempting to add directly by ComicVineID: ' + str(comicid) + '. I sure hope you know what you are doing.')
threading.Thread(target=importer.addComictoDB, args=[comicid,mismatch,None]).start()
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
searchresults = mb.findComic(name, mode, issue=None, explicit=explicit)
searchresults, explicit = mb.findComic(name, mode, issue=None, explicit=explicit)
elif type == 'comic' and mode == 'want':
searchresults = mb.findComic(name, mode, issue)
searchresults, explicit = mb.findComic(name, mode, issue)
elif type == 'storyarc':
searchresults = mb.findComic(name, mode, issue=None, storyarc='yes')
searchresults, explicit = mb.findComic(name, mode, issue=None, storyarc='yes')

searchresults = sorted(searchresults, key=itemgetter('comicyear','issues'), reverse=True)
#print ("Results: " + str(searchresults))
@@ -1021,7 +1021,7 @@ class WebInterface(object):
#limittheyear.append(cf['IssueDate'][-4:])
for ser in cflist:
logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(ser['IssueDate'][-4:]) + ')')
searchresults = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=ser['IssueDate'][-4:])
searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=ser['IssueDate'][-4:], explicit='all')
print searchresults
if len(searchresults) > 1:
logger.info('More than one result returned - this may have to be a manual add')
@@ -1076,7 +1076,7 @@ class WebInterface(object):
mylar.dbcheck()
logger.info("Deleted existed pull-list data. Recreating Pull-list...")
forcecheck = 'yes'
threading.Thread(target=weeklypull.pullit, args=[forcecheck]).start()
weeklypull.pullit(forcecheck)
raise cherrypy.HTTPRedirect("pullist")
pullrecreate.exposed = True
@@ -1954,10 +1954,10 @@ class WebInterface(object):
def confirmResult(self,comicname,comicid):
#print ("here.")
mode='series'
sresults = mb.findComic(comicname, mode, None)
sresults, explicit = mb.findComic(comicname, mode, None, explicit='all')
#print sresults
type='comic'
return serve_template(templatename="searchresults.html", title='Import Results for: "' + comicname + '"',searchresults=sresults, type=type, imported='confirm', ogcname=comicid)
return serve_template(templatename="searchresults.html", title='Import Results for: "' + comicname + '"',searchresults=sresults, type=type, imported='confirm', ogcname=comicid, explicit=explicit)
confirmResult.exposed = True

def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_rename=0, imp_metadata=0):
@@ -2175,9 +2175,9 @@ class WebInterface(object):

mode='series'
if yearRANGE is None:
sresults = mb.findComic(displaycomic, mode, issue=numissues) #ComicName, mode, issue=numissues)
sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
else:
sresults = mb.findComic(displaycomic, mode, issue=numissues, limityear=yearRANGE) #ComicName, mode, issue=numissues, limityear=yearRANGE)
sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, limityear=yearRANGE, explicit='all') #ComicName, mode, issue=numissues, limityear=yearRANGE)
type='comic'

if len(sresults) == 1:
@@ -2187,7 +2187,7 @@ class WebInterface(object):
# #need to move the files here.
elif len(sresults) == 0 or len(sresults) is None:
implog = implog + "no results, removing the year from the agenda and re-querying.\n"
sresults = mb.findComic(displaycomic, mode, issue=numissues) #ComicName, mode, issue=numissues)
sresults, explicit = mb.findComic(displaycomic, mode, issue=numissues, explicit='all') #ComicName, mode, issue=numissues)
if len(sresults) == 1:
sr = sresults[0]
implog = implog + "only one result...automagik-mode enabled for " + displaycomic + " :: " + str(sr['comicid']) + "\n"
@@ -2209,7 +2209,7 @@ class WebInterface(object):
cresults = self.addComic(comicid=sr['comicid'],comicname=sr['name'],comicyear=sr['comicyear'],comicpublisher=sr['publisher'],comicimage=sr['comicimage'],comicissues=sr['issues'],imported='yes',ogcname=ogcname) #imported=comicstoIMP,ogcname=ogcname)
return serve_template(templatename="searchfix.html", title="Error Check", comicname=sr['name'], comicid=sr['comicid'], comicyear=sr['comicyear'], comicimage=sr['comicimage'], comicissues=sr['issues'], cresults=cresults, imported='yes', ogcname=str(ogcname))
else:
return serve_template(templatename="searchresults.html", title='Import Results for: "' + displaycomic + '"',searchresults=sresults, type=type, imported='yes', ogcname=ogcname) #imported=comicstoIMP, ogcname=ogcname)
return serve_template(templatename="searchresults.html", title='Import Results for: "' + displaycomic + '"',searchresults=sresults, type=type, imported='yes', ogcname=ogcname, explicit=explicit) #imported=comicstoIMP, ogcname=ogcname)
preSearchit.exposed = True

def pretty_git(self, br_history):
@@ -42,11 +42,9 @@ def pullit(forcecheck=None):
else:
pulldate = pull_date['SHIPDATE']
except (sqlite3.OperationalError, TypeError),msg:
conn=sqlite3.connect(mylar.DB_FILE)
c=conn.cursor()
logger.info(u"Error Retrieving weekly pull list - attempting to adjust")
c.execute('DROP TABLE weekly')
c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)')
myDB.action("DROP TABLE weekly")
myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE text, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)")
pulldate = '00000000'
logger.fdebug(u"Table re-created, trying to populate")
else:
@@ -346,15 +344,17 @@ def pullit(forcecheck=None):
logger.info(u"Populating the NEW Weekly Pull list into Mylar.")
newtxtfile.close()

mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
#mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")

connection = sqlite3.connect(str(mylardb))
cursor = connection.cursor()
#connection = sqlite3.connect(str(mylardb))
#cursor = connection.cursor()

cursor.executescript('drop table if exists weekly;')
#cursor.execute('drop table if exists weekly;')
myDB.action("drop table if exists weekly")
myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text)")

cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text);")
connection.commit()
#cursor.execute("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text);")
#connection.commit()

csvfile = open(newfl, "rb")
@@ -368,14 +368,23 @@ def pullit(forcecheck=None):
#print (row)
try:
logger.debug("Row: %s" % row)
cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?,null);", row)

controlValueDict = {'COMIC': row[3],
'ISSUE': row[2],
'EXTRA': row[4] }
newValueDict = {'SHIPDATE': row[0],
'PUBLISHER': row[1],
'STATUS': row[5],
'COMICID': None }
myDB.upsert("weekly", newValueDict, controlValueDict)
#cursor.execute("INSERT INTO weekly VALUES (?,?,?,?,?,?,null);", row)
except Exception, e:
#print ("Error - invald arguments...-skipping")
pass
t+=1
csvfile.close()
connection.commit()
connection.close()
#connection.commit()
#connection.close()
logger.info(u"Weekly Pull List successfully loaded.")
#let's delete the files
pullpath = str(mylar.CACHE_DIR) + "/"
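The row-by-row INSERT through a raw sqlite3 cursor is replaced above by myDB.upsert() with a control dictionary (the lookup key) and a new-value dictionary (the columns to write). As a rough, hypothetical illustration of what the db module's action()/upsert() wrappers amount to (mylar's actual db.py is not part of this diff):

import sqlite3

class DBConnection(object):
    # hypothetical sketch of mylar's db module, for illustration only
    def __init__(self, filename='mylar.db'):
        self.connection = sqlite3.connect(filename)

    def action(self, query, args=None):
        # execute a single statement and commit it
        cursor = self.connection.cursor()
        cursor.execute(query, args or [])
        self.connection.commit()
        return cursor

    def upsert(self, table, value_dict, key_dict):
        # UPDATE the row matching key_dict; if nothing matched, INSERT the combined row
        where = " AND ".join("%s = ?" % k for k in key_dict)
        update = "UPDATE %s SET %s WHERE %s" % (
            table, ", ".join("%s = ?" % k for k in value_dict), where)
        cursor = self.action(update, list(value_dict.values()) + list(key_dict.values()))
        if cursor.rowcount == 0:
            keys = list(key_dict) + list(value_dict)
            insert = "INSERT INTO %s (%s) VALUES (%s)" % (
                table, ", ".join(keys), ", ".join("?" * len(keys)))
            self.action(insert, list(key_dict.values()) + list(value_dict.values()))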
@@ -725,6 +734,7 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul
else:
# here we add to upcoming table...
statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck, futurepull='yes', altissuenumber=altissuenum)

# here we update status of weekly table...
if statusupdate is not None:
cstatus = statusupdate['Status']
@@ -757,9 +767,11 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepul

def check(fname, txt):
try:
with open(fname) as dataf:
return any(txt in line for line in dataf)

except:
return None

def loaditup(comicname, comicid, issue, chktype):
myDB = db.DBConnection()