FIX: When dupe-checking and the file-size was returned as 0, the issue would be assumed to have an Archived status and would not post-process — now the existing series directory will be rescanned if required, and the dupe check rerun thereafter. FIX: Post-process was not updating the file-size of the post-processed issue.

This commit is contained in:
evilhero 2018-03-07 10:02:48 -05:00
parent c6c31b3588
commit 5553e940a0
3 changed files with 18 additions and 10 deletions

View File

@ -2093,7 +2093,7 @@ class PostProcessor(object):
#delete entry from nzblog table
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
updater.totals(comicid, havefiles='+1',issueid=issueid)
updater.totals(comicid, havefiles='+1',issueid=issueid,file=dst)
#update snatched table to change status to Downloaded
if annchk == "no":

View File

@ -2124,10 +2124,14 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
mylar.updater.dbUpdate(ComicIDList=cid, calledfrom='dupechk')
return duplicate_filecheck(filename, ComicID, IssueID, StoryArcID)
else:
#file is Archived, but no entry exists in the db for the location. Assume Archived, and don't post-process.
logger.fdebug('[DUPECHECK] File is Archived but no file can be located within the db at the specified location. Assuming this was a manual archival and will not post-process this issue.')
rtnval = {'action': "dont_dupe"}
#file is Archived, but no entry exists in the db for the location. Assume Archived, and don't post-process.
#quick rescan of files in dir, then rerun the dup check again...
mylar.updater.forceRescan(ComicID)
chk1 = duplicate_filecheck(filename, ComicID, IssueID, StoryArcID)
if chk1['action'] == 'dont_dupe':
logger.fdebug('[DUPECHECK] File is Archived but no file can be located within the db at the specified location. Assuming this was a manual archival and will not post-process this issue.')
rtnval = chk1
else:
rtnval = {'action': "dupe_file",
'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}

View File

@ -1552,7 +1552,7 @@ def forceRescan(ComicID, archive=None, module=None):
filechecker.setperms(rescan['ComicLocation'])
logger.info(module + ' I have physically found ' + str(foundcount) + ' issues, ignored ' + str(ignorecount) + ' issues, snatched ' + str(snatchedcount) + ' issues, and accounted for ' + str(totalarc) + ' in an Archived state [ Total Issue Count: ' + str(havefiles) + ' / ' + str(combined_total) + ' ]')
def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None):
def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None, file=None):
if module is None:
module = '[FILE-RESCAN]'
myDB = db.DBConnection()
@ -1566,14 +1566,14 @@ def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None):
if hf is None:
hf = myDB.selectone("SELECT a.Have, a.Total, b.Status as IssStatus FROM comics AS a INNER JOIN annuals as b ON a.ComicID=b.ComicID WHERE b.IssueID=?", [issueid]).fetchone()
totalfiles = int(hf['Total'])
logger.info('totalfiles: %s' % totalfiles)
logger.info('status: %s' % hf['IssStatus'])
logger.fdebug('totalfiles: %s' % totalfiles)
logger.fdebug('status: %s' % hf['IssStatus'])
if hf['IssStatus'] != 'Downloaded':
havefiles = int(hf['Have']) +1
logger.info('incremented havefiles: %s' % havefiles)
logger.fdebug('incremented havefiles: %s' % havefiles)
else:
havefiles = int(hf['Have'])
logger.info('untouched havefiles: %s' % havefiles)
logger.fdebug('untouched havefiles: %s' % havefiles)
#let's update the total count of comics that was found.
#store just the total of issues, since annuals gets tracked seperately.
controlValueStat = {"ComicID": ComicID}
@ -1581,4 +1581,8 @@ def totals(ComicID, havefiles=None, totalfiles=None, module=None, issueid=None):
"Total": totalfiles}
myDB.upsert("comics", newValueStat, controlValueStat)
if file is not None:
controlValueStat = {"IssueID": issueid,
"ComicID": ComicID}
newValueStat = {"ComicSize": os.path.getsize(file)}
myDB.upsert("issues", newValueStat, controlValueStat)