FIX:(#1251) Manual post-processing would delete the provided folder if it was empty (no metatagging being used), FIX:(#1250) Story Arcs: checking for watchlist matches / adding a new story arc would fail, FIX: During post-processing, if an issue had a null date of 0000-00-00 and rename files was enabled, the null month value would cause an error, IMP: Added dynamic naming to story arcs for better matching against the watchlist and filenames, IMP: Fixed some errant/unnecessary looping within the story-arc modules, which should speed things up, FIX: When snatching items from the story-arc detail page, the status would fail to update to Snatched if the series wasn't on the watchlist, IMP: In story-arc details, the series title will now link back to the series on your watchlist if it exists
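The dynamic-naming work in this commit boils down to comparing series names after stripping the pipes and whitespace that the dynamic form carries; a tiny illustration of that comparison (squash is a made-up helper name here, the real normalization is FileChecker.dynamic_replace() in filechecker.py):

import re

def squash(name):
    # Same comparison used throughout this commit: drop pipes and whitespace
    # and lowercase, so a dynamic name like 'The|Flash' matches 'The Flash'.
    return re.sub(r'[\|\s]', '', name.lower()).strip()

print(squash('The|Flash') == squash('The Flash'))   # True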

This commit is contained in:
evilhero 2016-04-15 12:44:28 -04:00
parent eb49fbb727
commit c7779872d1
6 changed files with 202 additions and 90 deletions

View File

@ -2,7 +2,7 @@
<%!
import os
import mylar
from mylar.helpers import checked
from mylar.helpers import checked, listLibrary
%>
<%def name="headerIncludes()">
@ -70,6 +70,9 @@
</tr>
</thead>
<tbody>
<%
cids = listLibrary()
%>
%for item in readlist:
<%
if item['Status'] == 'Skipped':
@ -92,11 +95,25 @@
grade = 'X'
else:
grade = 'Z'
if item['ComicID'] in cids:
haveit = cids[item['ComicID']]
else:
haveit = "No"
%>
<tr id="${item['ReadingOrder']}" class="grade${grade}">
<td id="readingorder">${item['ReadingOrder']}</td>
<td id="comicname" title="${item['IssueName']}">${item['ComicName']} (${item['SeriesYear']})</td>
<td id="comicname" title="${item['IssueName']}">
%if haveit == "No":
${item['ComicName']} (${item['SeriesYear']})
%else:
<a href="comicDetails?ComicID=${haveit}">${item['ComicName']} (${item['SeriesYear']})</a>
%endif
</td>
<td id="issue">${item['IssueNumber']}</td>
<%
try:

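For reference, the template branch above expressed in plain Python (assuming, as the template does, that helpers.listLibrary() returns a dict keyed by ComicID whose values are the watchlist ComicIDs, with "No" marking an arc issue whose series is not on the watchlist):

def watchlist_link(item, cids):
    # Mirrors the Mako logic above: link the series title back to the
    # watchlist entry when the arc issue's ComicID is present in cids.
    label = '%s (%s)' % (item['ComicName'], item['SeriesYear'])
    haveit = cids.get(item['ComicID'], 'No')
    if haveit == 'No':
        return label
    return '<a href="comicDetails?ComicID=%s">%s</a>' % (haveit, label)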
View File

@ -446,9 +446,28 @@ class PostProcessor(object):
#we can also search by ComicID to just grab those particular arcs as an alternative as well (not done)
logger.fdebug(module + ' Now checking if the issue also resides in one of the story arcs that I am watching.')
for fl in filelist['comiclist']:
mod_seriesname = '%' + re.sub(' ', '%', fl['series_name']).strip() + '%'
arc_series = myDB.select("SELECT * FROM readinglist WHERE ComicName LIKE?", [fl['series_name']]) # by StoryArcID")
#mod_seriesname = '%' + re.sub(' ', '%', fl['series_name']).strip() + '%'
#arc_series = myDB.select("SELECT * FROM readinglist WHERE ComicName LIKE?", [fl['series_name']]) # by StoryArcID")
manual_arclist = []
as_d = filechecker.FileChecker(watchcomic=fl['series_name'].decode('utf-8'))
as_dinfo = as_d.dynamic_replace(fl['series_name'])
mod_seriesname = as_dinfo['mod_seriesname']
arcloopchk = []
for x in alt_list:
cname = x['AS_DyComicName']
for ab in x['AS_Alt']:
if re.sub('[\|\s]', '', mod_seriesname.lower()).strip() in re.sub('[\|\s]', '', ab.lower()).strip():
if not any(re.sub('[\|\s]', '', cname.lower()) == x for x in arcloopchk):
arcloopchk.append(re.sub('[\|\s]', '', cname.lower()))
#make sure we add back in the original parsed filename here.
if not any(re.sub('[\|\s]', '', mod_seriesname).lower() == x for x in arcloopchk):
arcloopchk.append(re.sub('[\|\s]', '', mod_seriesname.lower()))
tmpsql = "SELECT * FROM readinglist WHERE DynamicComicName IN ({seq}) COLLATE NOCASE".format(seq=','.join('?' * len(arcloopchk)))
arc_series = myDB.select(tmpsql, tuple(arcloopchk))
if arc_series is None:
logger.error(module + ' No Story Arcs in Watchlist that contain that particular series - aborting Manual Post Processing. Maybe you should be running Import?')
return
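The tmpsql construction above generates one '?' placeholder per candidate dynamic name; the same technique in standalone form (plain sqlite3 instead of Mylar's db wrapper, table and column names as in the diff):

import sqlite3

def arcs_for_candidates(conn, candidates):
    # '?' * 3 -> '???', joined with ',' -> '?,?,?', i.e. one placeholder per
    # candidate; candidates must be non-empty or the IN () clause is invalid SQL.
    seq = ','.join('?' * len(candidates))
    sql = ('SELECT * FROM readinglist '
           'WHERE DynamicComicName IN ({seq}) COLLATE NOCASE').format(seq=seq)
    return conn.execute(sql, tuple(candidates)).fetchall()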
@ -456,25 +475,26 @@ class PostProcessor(object):
arcvals = []
for av in arc_series:
arcvals.append({"ComicName": av['ComicName'],
"ArcValues": {"StoryArc": av['StoryArc'],
"StoryArcID": av['StoryArcID'],
"IssueArcID": av['IssueArcID'],
"ComicName": av['ComicName'],
"ComicPublisher": av['IssuePublisher'],
"IssueID": av['IssueID'],
"IssueNumber": av['IssueNumber'],
"IssueYear": av['IssueYear'], #for some reason this is empty
"ReadingOrder": av['ReadingOrder'],
"IssueDate": av['IssueDate'],
"Status": av['Status'],
"Location": av['Location']},
"WatchValues": {"SeriesYear": av['SeriesYear'],
"LatestDate": av['IssueDate'],
"ComicVersion": 'v' + str(av['SeriesYear']),
"Publisher": av['IssuePublisher'],
"Total": av['TotalIssues'], # this will return the total issues in the arc (not needed for this)
"ComicID": av['ComicID'],
"IsArc": True}
"ArcValues": {"StoryArc": av['StoryArc'],
"StoryArcID": av['StoryArcID'],
"IssueArcID": av['IssueArcID'],
"ComicName": av['ComicName'],
"DynamicComicName": av['DynamicComicName'],
"ComicPublisher": av['IssuePublisher'],
"IssueID": av['IssueID'],
"IssueNumber": av['IssueNumber'],
"IssueYear": av['IssueYear'], #for some reason this is empty
"ReadingOrder": av['ReadingOrder'],
"IssueDate": av['IssueDate'],
"Status": av['Status'],
"Location": av['Location']},
"WatchValues": {"SeriesYear": av['SeriesYear'],
"LatestDate": av['IssueDate'],
"ComicVersion": 'v' + str(av['SeriesYear']),
"Publisher": av['IssuePublisher'],
"Total": av['TotalIssues'], # this will return the total issues in the arc (not needed for this)
"ComicID": av['ComicID'],
"IsArc": True}
})
ccnt=0
@ -619,6 +639,7 @@ class PostProcessor(object):
grdst = storyarcd
#tag the meta.
metaresponse = None
if mylar.ENABLE_META:
logger.info('[STORY-ARC POST-PROCESSING] Metatagging enabled - proceeding...')
try:
@ -676,10 +697,15 @@ class PostProcessor(object):
grab_dst = os.path.join(grdst, dfilename)
logger.fdebug(module + ' Destination Path : ' + grab_dst)
grab_src = os.path.join(self.nzb_folder, ofilename)
if metaresponse:
src_location = odir
else:
src_location = self.nzb_folder
grab_src = os.path.join(src_location, ofilename)
logger.fdebug(module + ' Source Path : ' + grab_src)
logger.info(module + ' ' + mylar.FILE_OPTS + 'ing ' + str(ofilename) + ' into directory : ' + str(grab_dst))
logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst))
try:
self.fileop(grab_src, grab_dst)
except (OSError, IOError):
@ -865,6 +891,7 @@ class PostProcessor(object):
issueid = arcdata['IssueID']
#tag the meta.
metaresponse = None
if mylar.ENABLE_META:
self._log("Metatagging enabled - proceeding...")
try:
@ -925,11 +952,16 @@ class PostProcessor(object):
self._log("Destination Path : " + grab_dst)
logger.info(module + ' Destination Path : ' + grab_dst)
grab_src = os.path.join(self.nzb_folder, ofilename)
if metaresponse:
src_location = odir
else:
src_location = self.nzb_folder
grab_src = os.path.join(src_location, ofilename)
self._log("Source Path : " + grab_src)
logger.info(module + ' Source Path : ' + grab_src)
logger.info(module + ' ' + mylar.FILE_OPTS + 'ing ' + str(ofilename) + ' into directory : ' + str(grab_dst))
logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst))
try:
self.fileop(grab_src, grab_dst)
@ -941,14 +973,26 @@ class PostProcessor(object):
#tidyup old path
if mylar.FILE_OPTS == 'move':
try:
shutil.rmtree(self.nzb_folder)
#make sure we don't delete the directory passed via manual-pp and adjust for a trailing slash if present
if self.nzb_folder.endswith('/') or self.nzb_folder.endswith('\\'):
tmp_folder = self.nzb_folder[:-1]
else:
tmp_folder = self.nzb_folder
if os.path.isdir(src_location) and odir != tmp_folder:
if not os.listdir(src_location):
shutil.rmtree(src_location)
logger.debug(module + ' Removed temporary directory : ' + src_location)
self._log("Removed temporary directory : " + src_location)
if not os.listdir(self.nzb_folder):
shutil.rmtree(self.nzb_folder)
logger.debug(module + ' Removed temporary directory : ' + self.nzb_folder)
self._log("Removed temporary directory : " + self.nzb_folder)
except (OSError, IOError):
self._log("Failed to remove temporary directory.")
logger.debug(module + ' Failed to remove temporary directory - check directory and manually re-run.')
return
logger.debug(module + ' Removed temporary directory : ' + self.nzb_folder)
self._log("Removed temporary directory : " + self.nzb_folder)
#delete entry from nzblog table
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
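The cleanup above now only deletes a directory when it is empty and is not the folder supplied to manual post-processing, normalising a trailing slash before comparing. The same guard as a standalone sketch (helper name is illustrative):

import os
import shutil

def remove_if_empty(path, protected_folder):
    # Normalise a trailing slash/backslash so '/downloads/comic/' and
    # '/downloads/comic' compare equal, then delete only empty, unprotected dirs.
    if os.path.isdir(path) and path.rstrip('/\\') != protected_folder.rstrip('/\\'):
        if not os.listdir(path):
            shutil.rmtree(path)
            return True
    return False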
@ -1285,6 +1329,8 @@ class PostProcessor(object):
logger.fdebug(module + ' Issue Year : ' + str(issueyear))
month = issuenzb['IssueDate'][5:7].replace('-', '').strip()
month_name = helpers.fullmonth(month)
if month_name is None:
month_name = 'None'
# comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
publisher = comicnzb['ComicPublisher']
self._log("Publisher: " + publisher)
@ -1632,8 +1678,12 @@ class PostProcessor(object):
if mylar.FILE_OPTS == 'move':
#tidyup old path
try:
if os.path.isdir(odir) and odir != self.nzb_folder:
logger.fdebug(module + ' self.nzb_folder: ' + self.nzb_folder)
#make sure we don't delete the directory passed via manual-pp and adjust for a trailing slash if present
if self.nzb_folder.endswith('/') or self.nzb_folder.endswith('\\'):
tmp_folder = self.nzb_folder[:-1]
else:
tmp_folder = self.nzb_folder
if os.path.isdir(odir) and odir != tmp_folder:
# check to see if the directory is empty or not.
if not os.listdir(odir):
logger.fdebug(module + ' Tidying up. Deleting folder : ' + odir)

View File

@ -1580,7 +1580,7 @@ def dbcheck():
# c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS importresults (impID TEXT, ComicName TEXT, ComicYear TEXT, Status TEXT, ImportDate TEXT, ComicFilename TEXT, ComicLocation TEXT, WatchMatch TEXT, DisplayName TEXT, SRID TEXT, ComicID TEXT, IssueID TEXT, Volume TEXT, IssueNumber TEXT, DynamicName TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readlist (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Status TEXT, DateAdded TEXT, Location TEXT, inCacheDir TEXT, SeriesYear TEXT, ComicID TEXT, StatusChange TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT, DynamicComicName TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS annuals (IssueID TEXT, Issue_Number TEXT, IssueName TEXT, IssueDate TEXT, Status TEXT, ComicID TEXT, GCDComicID TEXT, Location TEXT, ComicSize TEXT, Int_IssueNumber INT, ComicName TEXT, ReleaseDate TEXT, ReleaseComicID TEXT, ReleaseComicName TEXT, IssueDate_Edit TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS rssdb (Title TEXT UNIQUE, Link TEXT, Pubdate TEXT, Site TEXT, Size TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS futureupcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Publisher TEXT, Status TEXT, DisplayComicName TEXT)')
@ -1678,7 +1678,7 @@ def dbcheck():
try:
c.execute('SELECT DynamicComicName from comics')
if DYNAMIC_UPDATE < 2:
if DYNAMIC_UPDATE < 3:
dynamic_upgrade = True
else:
dynamic_upgrade = False
@ -1949,6 +1949,16 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN Int_IssueNumber INT')
try:
c.execute('SELECT DynamicComicName from readinglist')
if DYNAMIC_UPDATE < 3:
dynamic_upgrade = True
else:
dynamic_upgrade = False
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN DynamicComicName TEXT')
dynamic_upgrade = True
## -- searchresults Table --
try:
c.execute('SELECT SRID from searchresults')
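The dbcheck() changes use sqlite's probe-then-ALTER idiom: select the new column and add it when sqlite3.OperationalError reports it missing, then flag dynamic_upgrade so upgrade_dynamic() re-populates it. The idiom in isolation (in-memory database, names taken from the diff):

import sqlite3

conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS readinglist (StoryArcID TEXT, ComicName TEXT)')

try:
    c.execute('SELECT DynamicComicName FROM readinglist')
    dynamic_upgrade = False   # column already there (the real code still
                              # re-runs the upgrade while DYNAMIC_UPDATE < 3)
except sqlite3.OperationalError:
    c.execute('ALTER TABLE readinglist ADD COLUMN DynamicComicName TEXT')
    dynamic_upgrade = True

conn.commit()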

View File

@ -146,6 +146,7 @@ class FileChecker(object):
'comicfilename': runresults['comicfilename'],
'comiclocation': runresults['comiclocation'],
'series_name': runresults['series_name'],
'dynamic_name': runresults['dynamic_name'],
'series_volume': runresults['series_volume'],
'issue_year': runresults['issue_year'],
'issue_number': runresults['issue_number'],
@ -735,6 +736,7 @@ class FileChecker(object):
'comicfilename': filename,
'comiclocation': self.dir,
'series_name': series_name,
'dynamic_name': self.dynamic_replace(series_name)['mod_seriesname'],
'series_volume': issue_volume,
'issue_year': issue_year,
'issue_number': issue_number,

View File

@ -499,6 +499,8 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
issueyear = issuedate[:4]
month = issuedate[5:7].replace('-', '').strip()
month_name = fullmonth(month)
if month_name is None:
month_name = 'None'
logger.fdebug('Issue Year : ' + str(issueyear))
logger.fdebug('Publisher: ' + str(publisher))
logger.fdebug('Series: ' + str(series))
@ -1135,23 +1137,39 @@ def latestdate_fix():
def upgrade_dynamic():
import db, logger
dynamic_list = []
dynamic_comiclist = []
myDB = db.DBConnection()
#update the comics db to include the Dynamic Names (and any further changes as required)
clist = myDB.select('SELECT * FROM Comics')
for cl in clist:
cl_d = mylar.filechecker.FileChecker(watchcomic=cl['ComicName'])
cl_dyninfo = cl_d.dynamic_replace(cl['ComicName'])
dynamic_list.append({'DynamicComicName': re.sub('[\|\s]','', cl_dyninfo['mod_seriesname'].lower()).strip(),
dynamic_comiclist.append({'DynamicComicName': re.sub('[\|\s]','', cl_dyninfo['mod_seriesname'].lower()).strip(),
'ComicID': cl['ComicID']})
if len(dynamic_list) > 0:
for dl in dynamic_list:
if len(dynamic_comiclist) > 0:
for dl in dynamic_comiclist:
CtrlVal = {"ComicID": dl['ComicID']}
newVal = {"DynamicComicName": dl['DynamicComicName']}
myDB.upsert("Comics", newVal, CtrlVal)
logger.info('Finished updating ' + str(len(dynamic_list)) + ' entries within the db.')
mylar.DYNAMIC_UPDATE = 2
#update the readinglist db to include the Dynamic Names (and any further changes as required)
dynamic_storylist = []
rlist = myDB.select('SELECT * FROM readinglist WHERE StoryArcID is not NULL')
for rl in rlist:
rl_d = mylar.filechecker.FileChecker(watchcomic=rl['ComicName'])
rl_dyninfo = rl_d.dynamic_replace(rl['ComicName'])
dynamic_storylist.append({'DynamicComicName': re.sub('[\|\s]','', rl_dyninfo['mod_seriesname'].lower()).strip(),
'IssueArcID': rl['IssueArcID']})
if len(dynamic_storylist) > 0:
for ds in dynamic_storylist:
CtrlVal = {"IssueArcID": ds['IssueArcID']}
newVal = {"DynamicComicName": ds['DynamicComicName']}
myDB.upsert("readinglist", newVal, CtrlVal)
logger.info('Finished updating ' + str(len(dynamic_comiclist)) + ' / ' + str(len(dynamic_storylist)) + ' entries within the db.')
mylar.DYNAMIC_UPDATE = 3
mylar.config_write()
return

View File

@ -1258,6 +1258,9 @@ class WebInterface(object):
foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=None, IssueDate=IssueDate, StoreDate=StoreDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID)
if foundcom == "yes":
logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")")
controlValueDict = {"IssueArcID": IssueArcID}
newStatus = {"Status": "Snatched"}
myDB.upsert("readinglist", newStatus, controlValueDict)
#raise cherrypy.HTTPRedirect("readlist")
return foundcom
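The new upsert flips the readinglist row to Snatched even when the series is not on the watchlist; in plain sqlite3 the call is roughly equivalent to the following (illustrative, not Mylar's actual db wrapper):

import sqlite3

def mark_snatched(conn, issue_arc_id):
    # Roughly what myDB.upsert("readinglist", {"Status": "Snatched"},
    # {"IssueArcID": issue_arc_id}) does: update if the row exists, insert otherwise.
    cur = conn.execute('UPDATE readinglist SET Status=? WHERE IssueArcID=?',
                       ('Snatched', issue_arc_id))
    if cur.rowcount == 0:
        conn.execute('INSERT INTO readinglist (IssueArcID, Status) VALUES (?, ?)',
                     (issue_arc_id, 'Snatched'))
    conn.commit()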
@ -2453,6 +2456,35 @@ class WebInterface(object):
if ArcWatch is None:
logger.info("No Story Arcs to search")
else:
#cycle through the story arcs here for matches on the watchlist
arcdir = helpers.filesafe(ArcWatch[0]['StoryArc'])
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arcdir)
else:
dstloc = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
# if sarc_title != arc['StoryArc']:
if not os.path.isdir(dstloc):
logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.')
checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
return
#get the list of files within the storyarc directory, if any.
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles()
fccnt = filechk['comiccount']
logger.fdebug('[STORY ARC DIRECTORY] ' + str(fccnt) + ' files exist within this directory.')
if fccnt > 0:
filelist = filechk['comiclist']
else:
filelist = None
logger.info(filechk)
arc_match = []
wantedlist = []
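The restructuring here is the "unnecessary looping" fix from the commit message: the story-arc directory is listed once, before the per-arc loop, and each arc then filters the cached file list by dynamic name. The shape of that change reduced to a runnable sketch, using plain os.listdir() instead of filechecker.FileChecker (names are illustrative):

import os
import re

def squash(name):
    return re.sub(r'[\|\s]', '', name.lower()).strip()

def match_arcs(arc_entries, arc_dir):
    # Scan the directory once...
    files = os.listdir(arc_dir) if os.path.isdir(arc_dir) else []
    matches = {}
    # ...then filter the cached listing per arc instead of re-scanning
    # the directory inside the loop, which is what the old code did.
    for arc in arc_entries:                 # e.g. {'name': 'The Flash', 'issue': '35'}
        wanted = squash(arc['name'])
        matches[(arc['name'], arc['issue'])] = [
            f for f in files if squash(f).startswith(wanted)]
    return matches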
@ -2460,38 +2492,20 @@ class WebInterface(object):
sarc_title = None
showonreadlist = 1 # 0 won't show storyarcissues on readinglist main page, 1 will show
for arc in ArcWatch:
#cycle through the story arcs here for matches on the watchlist
arcdir = helpers.filesafe(arc['StoryArc'])
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arcdir)
else:
dstloc = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
# if sarc_title != arc['StoryArc']:
if not os.path.isdir(dstloc):
logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.')
checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
return
sarc_title = arc['StoryArc']
logger.fdebug("arc: " + arc['StoryArc'] + " : " + arc['ComicName'] + " : " + arc['IssueNumber'])
logger.fdebug('[' + arc['StoryArc'] + '] ' + arc['ComicName'] + ' : ' + arc['IssueNumber'])
matcheroso = "no"
mod_seriesname = '%' + re.sub(' ', '%', arc['ComicName']).strip() + '%'
comics = myDB.select('SELECT * FROM comics Where ComicName LIKE ?', [mod_seriesname])
#fc = filechecker.FileChecker(watchcomic=arc['ComicName'])
#modi_names = fc.dynamic_replace(arc['ComicName'])
#mod_arc = re.sub('[\|\s]', '', modi_names['mod_watchcomic'].lower()).strip() #is from the arc db
comics = myDB.select("SELECT * FROM comics WHERE DynamicComicName IN (?) COLLATE NOCASE", [arc['DynamicComicName']])
for comic in comics:
fc = filechecker.FileChecker(watchcomic=arc['ComicName'])
modi_names = fc.dynamic_replace(comic['ComicName'])
mod_arc = modi_names['mod_watchcomic'] #is from the arc db
mod_watch = modi_names['mod_seriesname'] #is from the comics db
mod_watch = comic['DynamicComicName'] #is from the comics db
if mod_watch == mod_arc:# and arc['SeriesYear'] == comic['ComicYear']:
if re.sub('[\|\s]','', mod_watch.lower()).strip() == re.sub('[\|\s]', '', mod_arc.lower()).strip():
logger.fdebug("initial name match - confirming issue # is present in series")
if comic['ComicID'][:1] == 'G':
# if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
@ -2541,32 +2555,33 @@ class WebInterface(object):
"IssueYear": arc['IssueYear']})
logger.fdebug('destination location set to : ' + dstloc)
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=arc['ComicName'], Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles()
#fchk = filechecker.FileChecker(dir=dstloc, watchcomic=arc['ComicName'], Publisher=None, sarc='true', justparse=True)
#filechk = fchk.listFiles()
fn = 0
fccnt = filechk['comiccount']
logger.fdebug('files in directory: ' + str(fccnt))
for tmpfc in filechk['comiclist']:
haveissue = "no"
issuedupe = "no"
temploc = tmpfc['issue_number'].replace('_', ' ')
fcdigit = helpers.issuedigits(arc['IssueNumber'])
int_iss = helpers.issuedigits(temploc)
if int_iss == fcdigit:
logger.fdebug(arc['ComicName'] + ' Issue #' + arc['IssueNumber'] + ' already present in StoryArc directory.')
#update readinglist db to reflect status.
if mylar.READ2FILENAME:
readorder = helpers.renamefile_readingorder(arc['ReadingOrder'])
dfilename = str(readorder) + "-" + tmpfc['comicfilename']
else:
dfilename = tmpfc['comicfilename']
valids = [x for x in filelist if re.sub('[\|\s]','', x['dynamic_name'].lower()).strip() == re.sub('[\|\s]','', mod_arc.lower()).strip()]
logger.info('valids: ' + str(valids))
if len(valids) > 0:
for tmpfc in filelist:
haveissue = "no"
issuedupe = "no"
temploc = tmpfc['issue_number'].replace('_', ' ')
fcdigit = helpers.issuedigits(arc['IssueNumber'])
int_iss = helpers.issuedigits(temploc)
if int_iss == fcdigit:
logger.fdebug(arc['ComicName'] + ' Issue #' + arc['IssueNumber'] + ' already present in StoryArc directory.')
#update readinglist db to reflect status.
if mylar.READ2FILENAME:
readorder = helpers.renamefile_readingorder(arc['ReadingOrder'])
dfilename = str(readorder) + "-" + tmpfc['comicfilename']
else:
dfilename = tmpfc['comicfilename']
newVal = {"Status": "Downloaded",
"Location": dfilename} #tmpfc['ComicFilename']}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("readinglist", newVal, ctrlVal)
fn+=1
newVal = {"Status": "Downloaded",
"Location": dfilename} #tmpfc['ComicFilename']}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("readinglist", newVal, ctrlVal)
fn+=1
logger.fdebug("we matched on " + str(len(arc_match)) + " issues")
for m_arc in arc_match: