FIX:(#1498) When deleting series, will now delete from readinglist if delete folder is also enabled, FIX:(#1499) Better error handling when nzb/torrent provider does not respond with results, FIX: When adding story arcs, the status of 'None' would be applied to all issues not physically found on device - will now default to Skipped, FIX:(#1492) When issues for arc were not stored in directory, could not properly locate issue in series folder when determining the location (especially for Download Issue option). Will now save full path to issue regardless of whether the issue is in the arc folder, or not, FIX:(#1497) If file is in archived status when post-processing but no location is stored for archived file, would fail during the dupecheck, FIX: ALT_PULL 0 method would fail in some cases due to not being able to verify against the site's certificate

This commit is contained in:
evilhero 2016-12-23 14:52:37 -05:00
parent b5814c9e22
commit 701131a5ef
5 changed files with 182 additions and 57 deletions

View File

@ -52,21 +52,15 @@
<%
storyarcdest = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs')
%>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.STORYARCDIR)} /><label>Arcs in StoryArc Directory </br><small>(${storyarcdest})</small></label>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" onclick="getOption(this)" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.STORYARCDIR)} /><label>Arcs in StoryArc Directory </br><small>(${storyarcdest})</small></label>
</div>
<div id="arc_options">
<div class="row">
<label>Arc Folder Format</label>
<input type="text" title="$publisher, $spanyears, $arc" name="arc_folderformat" value="${mylar.ARC_FOLDERFORMAT}" size="25">
</div>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.READ2FILENAME)} /><label>Append Reading # to filename</label></br>
</div>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.READ2FILENAME)} /><label>Append Reading # to filename</label></br>
<%
if mylar.STORYARCDIR:
carcdir = 'StoryArc'
else:
carcdir = 'GrabBag'
%>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="copy2arcdir" id="copy2arcdir" value="1" ${checked(mylar.COPY2ARCDIR)} />
<select name="arc_fileops" id="arc_fileops">
<%
@ -84,7 +78,7 @@
%>
<option value=${x} ${outputselect}>${x}</option>
%endfor
</select><label> watchlisted issues to ${carcdir} Directory</label>
</select><label> watchlisted issues to <span id="arcopts"></span> Directory</label>
</div>
</fieldset>
<div>
@ -155,6 +149,15 @@
};
</script>
<script type="text/javascript">
// Mirror the 'Arcs in StoryArc Directory' checkbox state into the
// inline label text, so the copy-to-arc option reads correctly.
function getOption(sel){
    var target = document.getElementById('arcopts');
    target.innerHTML = sel.checked ? 'StoryArc' : 'GrabBag';
};
</script>
<script type="text/javascript">
$("#menu_link_scan").click(function() {
$('#chkoptions').submit();
return true;
@ -166,10 +169,12 @@
if ($("#storyarcdir").is(":checked"))
{
$("#arc_options").show();
document.getElementById('arcopts').innerHTML = 'StoryArc';
}
else
{
$("#arc_options").hide();
document.getElementById('arcopts').innerHTML = 'GrabBag';
}
$("#storyarcdir").click(function(){
if ($("#storyarcdir").is(":checked"))

View File

@ -1833,9 +1833,10 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
mylar.updater.dbUpdate(ComicIDList=cid, calledfrom='dupechk')
return duplicate_filecheck(filename, ComicID, IssueID, StoryArcID)
else:
#not sure if this one is correct - should never actually get to this point.
rtnval.append({'action': "dupe_file",
'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
#file is Archived, but no entry exists in the db for the location. Assume Archived, and don't post-process.
logger.fdebug('[DUPECHECK] File is Archived but no file can be located within the db at the specified location. Assuming this was a manual archival and will not post-process this issue.')
rtnval.append({'action': "dont_dupe"})
else:
rtnval.append({'action': "dupe_file",
'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
@ -2213,6 +2214,85 @@ def checkthe_id(comicid=None, up_vals=None):
'ID': up_vals[0]['id']}
myDB.upsert("ref32p", newVal, ctrlVal)
def updatearc_locs(storyarcid, issues):
    """Verify the on-disk locations of Downloaded issues belonging to a story
    arc and store the resulting full path back into the readinglist table.

    For each issue that is Downloaded, the file is located in the series
    directory (falling back to MULTIPLE_DEST_DIRS when configured), optionally
    renamed / reading-order-prefixed, copied or linked into the arc destination
    directory, and the final full path is upserted into readinglist.

    storyarcid -- StoryArcID whose readinglist rows get their Location updated.
    issues     -- list of dicts describing arc issues (expects at least
                  IssueID, ComicID, ComicName, IssueNumber, Publisher,
                  IssuePublisher, StoryArc, StoryArcID, ReadingOrder).
    """
    import db, logger
    myDB = db.DBConnection()
    issuelist = [x['IssueID'] for x in issues]
    if not issuelist:
        # guard: an empty list would render an invalid 'IN ()' SQL clause.
        return
    tmpsql = "SELECT a.comicid, a.comiclocation, b.comicid, b.status, b.issueid, b.location FROM comics as a INNER JOIN issues as b ON a.comicid = b.comicid WHERE b.issueid in ({seq})".format(seq=','.join(['?'] *(len(issuelist))))
    chkthis = myDB.select(tmpsql, issuelist)
    if chkthis is None:
        return
    update_iss = []
    for chk in chkthis:
        if chk['Status'] != 'Downloaded':
            continue
        pathsrc = os.path.join(chk['ComicLocation'], chk['Location'])
        if not os.path.exists(pathsrc):
            # fall back to the multiple-destination-directory mirror of the
            # series folder, if enabled and present.
            if all([mylar.MULTIPLE_DEST_DIRS is not None, mylar.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])) != chk['ComicLocation'], os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])))]):
                pathsrc = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation']), chk['Location'])
            else:
                # FIX: original referenced undefined names 'module' and
                # 'pathdir' here, raising NameError instead of logging.
                logger.fdebug('file does not exist in location: ' + pathsrc + '. Cannot validate location - some options will not be available for this item.')
                continue
        # find the arc entry matching this issue.
        arcinfo = None
        for la in issues:
            if la['IssueID'] == chk['IssueID']:
                arcinfo = la
                break
        if arcinfo is None:
            continue
        if arcinfo['Publisher'] is None:
            arcpub = arcinfo['IssuePublisher']
        else:
            arcpub = arcinfo['Publisher']
        grdst = arcformat(arcinfo['StoryArc'], spantheyears(arcinfo['StoryArcID']), arcpub)
        logger.info('grdst:' + grdst)
        # send to renamer here if valid.
        dfilename = chk['Location']
        if mylar.RENAME_FILES:
            renamed_file = rename_param(arcinfo['ComicID'], arcinfo['ComicName'], arcinfo['IssueNumber'], chk['Location'], issueid=arcinfo['IssueID'], arc=arcinfo['StoryArc'])
            if renamed_file:
                dfilename = renamed_file['nfilename']
        if mylar.READ2FILENAME:
            # prefix the zero-padded reading order so files sort in arc order.
            readord = renamefile_readingorder(arcinfo['ReadingOrder'])
            dfilename = str(readord) + "-" + dfilename
        pathdst = os.path.join(grdst, dfilename)
        logger.fdebug('Destination Path : ' + pathdst)
        logger.fdebug('Source Path : ' + pathsrc)
        if not os.path.isfile(pathdst):
            logger.info('[' + mylar.ARC_FILEOPS.upper() + '] ' + pathsrc + ' into directory : ' + pathdst)
            try:
                # need to ensure that src is pointing to the series in order
                # to do a soft/hard-link properly
                fileoperation = file_ops(pathsrc, pathdst, arc=True)
                if not fileoperation:
                    raise OSError
            except (OSError, IOError):
                logger.fdebug('[' + mylar.ARC_FILEOPS.upper() + '] Failure ' + pathsrc + ' - check directories and manually re-run.')
                continue
        update_iss.append({'IssueID': chk['IssueID'],
                           'Location': pathdst})
    for ui in update_iss:
        logger.info(ui['IssueID'] + ' to update location to: ' + ui['Location'])
        myDB.upsert("readinglist", {'Location': ui['Location']}, {'IssueID': ui['IssueID'], 'StoryArcID': storyarcid})
def spantheyears(storyarcid):
import db
@ -2273,9 +2353,11 @@ def arcformat(arc, spanyears, publisher):
logger.info('StoryArcs')
logger.info(arcpath)
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arcpath)
else:
elif mylar.COPY2ARCDIR:
logger.warn('Story arc directory is not configured. Defaulting to grabbag directory: ' + mylar.GRABBAG_DIR)
dstloc = mylar.GRABBAG_DIR
else:
dstloc = None
return dstloc

View File

@ -519,7 +519,6 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
logger.fdebug("Sending request to [" + str(nzbprov) + "] RSS for " + ComicName + " : " + str(mod_isssearch))
bb = rsscheck.torrentdbsearch(ComicName, mod_isssearch, ComicID, nzbprov)
rss = "yes"
#if bb is not None: logger.fdebug("bb results: " + str(bb))
else:
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to RSS for " + str(findcomic) + " : " + str(mod_isssearch) + " (" + str(ComicYear) + ")")
@ -528,7 +527,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
else: nzbprov_fix = nzbprov
bb = rsscheck.nzbdbsearch(findcomic, mod_isssearch, ComicID, nzbprov_fix, ComicYear, ComicVersion)
rss = "yes"
#if bb is not None: logger.fdebug("bb results: " + str(bb))
if bb is None:
bb = 'no results'
#this is the API calls
else:
#32P is redundant now since only RSS works
@ -544,6 +544,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
a = auth32p.info32p(searchterm=searchterm)
bb = a.searchit()
rss = "no"
if bb is None:
bb = 'no results'
else:
bb = "no results"
rss = "no"
@ -552,7 +554,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
logger.fdebug("Sending request to [TPSE] for " + str(cmname) + " : " + str(mod_isssearch))
bb = rsscheck.torrents(pickfeed='TPSE-SEARCH', seriesname=cmname, issue=mod_isssearch)#cmname,issue=mod_isssearch)
rss = "no"
#if bb is not None: logger.fdebug("results: " + str(bb))
if bb is None:
bb = 'no results'
elif nzbprov != 'experimental':
if nzbprov == 'dognzb':
findurl = "https://api.dognzb.cr/api?t=search&q=" + str(comsearch) + "&o=xml&cat=7030"
@ -647,7 +650,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
#HTTP Error 503
logger.warn('Aborting search due to Provider unavailability')
foundc = "no"
break
break
try:
if str(r.status_code) != '200':

View File

@ -779,7 +779,6 @@ class WebInterface(object):
resumeSeries.exposed = True
def deleteSeries(self, ComicID, delete_dir=None):
print delete_dir
myDB = db.DBConnection()
comic = myDB.selectone('SELECT * from comics WHERE ComicID=?', [ComicID]).fetchone()
if comic['ComicName'] is None: ComicName = "None"
@ -801,6 +800,7 @@ class WebInterface(object):
logger.warn('Unable to remove directory after removing series from Mylar.')
else:
logger.warn('Unable to remove directory as it does not exist in : ' + seriesdir)
myDB.action('DELETE from readlist WHERE ComicID=?', [ComicID])
helpers.ComicSort(sequence='update')
raise cherrypy.HTTPRedirect("home")
@ -2436,7 +2436,19 @@ class WebInterface(object):
arcpub = arcinfo[0]['Publisher']
lowyear = 9999
maxyear = 0
issref = []
for la in arcinfo:
if all([la['Status'] == 'Downloaded', la['Location'] is None,]):
issref.append({'IssueID': la['IssueID'],
'ComicID': la['ComicID'],
'IssuePublisher': la['IssuePublisher'],
'Publisher': la['Publisher'],
'StoryArc': la['StoryArc'],
'StoryArcID': la['StoryArcID'],
'ComicName': la['ComicName'],
'IssueNumber': la['IssueNumber'],
'ReadingOrder': la['ReadingOrder']})
if la['IssueDate'] is None:
continue
else:
@ -2444,6 +2456,7 @@ class WebInterface(object):
maxyear = int(la['IssueDate'][:4])
if int(la['IssueDate'][:4]) < lowyear:
lowyear = int(la['IssueDate'][:4])
if maxyear == 0:
spanyears = la['SeriesYear']
@ -2457,6 +2470,12 @@ class WebInterface(object):
except:
cvarcid = None
sdir = mylar.GRABBAG_DIR
if len(issref) > 0:
logger.info(issref)
helpers.updatearc_locs(StoryArcID, issref)
arcinfo = myDB.select("SELECT * from readinglist WHERE StoryArcID=? order by ReadingOrder ASC", [StoryArcID])
return serve_template(templatename="storyarc_detail.html", title="Detailed Arc list", readlist=arcinfo, storyarcname=StoryArcName, storyarcid=StoryArcID, cvarcid=cvarcid, sdir=sdir)
detailStoryArc.exposed = True
@ -2749,14 +2768,14 @@ class WebInterface(object):
logger.info('arcpub: ' + arcpub)
dstloc = helpers.arcformat(arcdir, spanyears, arcpub)
if not os.path.isdir(dstloc):
logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.')
checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
return
#if not os.path.isdir(dstloc) and mylar.STORYARCDIR:
# logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.')
# checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
# if not checkdirectory:
# logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
# return
if mylar.CVINFO or (mylar.CV_ONLY and mylar.CVINFO):
if all([mylar.CVINFO, mylar.STORYARCDIR]):
if not os.path.isfile(os.path.join(dstloc, "cvinfo")) or mylar.CV_ONETIMER:
logger.fdebug('Generating cvinfo file for story-arc.')
with open(os.path.join(dstloc, "cvinfo"), "w") as text_file:
@ -2770,16 +2789,15 @@ class WebInterface(object):
filechecker.setperms(os.path.join(dstloc, 'cvinfo'))
#get the list of files within the storyarc directory, if any.
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles()
fccnt = filechk['comiccount']
logger.fdebug('[STORY ARC DIRECTORY] ' + str(fccnt) + ' files exist within this directory.')
if fccnt > 0:
filelist = filechk['comiclist']
else:
filelist = None
logger.info(filechk)
filelist = None
if mylar.STORYARCDIR:
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles()
fccnt = filechk['comiccount']
logger.fdebug('[STORY ARC DIRECTORY] ' + str(fccnt) + ' files exist within this directory.')
if fccnt > 0:
filelist = filechk['comiclist']
logger.info(filechk)
arc_match = []
wantedlist = []
@ -2838,8 +2856,8 @@ class WebInterface(object):
"match_issuearcid": arc['IssueArcID'],
"match_seriesyear": comic['ComicYear'],
"match_readingorder": arc['ReadingOrder'],
"match_filedirectory": comic['ComicLocation'],
"destination_location": dstloc})
"match_filedirectory": comic['ComicLocation'], #series directory path
"destination_location": dstloc}) #path to given storyarc / grab-bag directory
matcheroso = "yes"
break
if matcheroso == "no":
@ -2849,11 +2867,7 @@ class WebInterface(object):
"IssueNumber": arc['IssueNumber'],
"IssueYear": arc['IssueYear']})
logger.fdebug('destination location set to : ' + dstloc)
#fchk = filechecker.FileChecker(dir=dstloc, watchcomic=arc['ComicName'], Publisher=None, sarc='true', justparse=True)
#filechk = fchk.listFiles()
if filelist is not None:
if filelist is not None and mylar.STORYARCDIR:
fn = 0
valids = [x for x in filelist if re.sub('[\|\s]','', x['dynamic_name'].lower()).strip() == re.sub('[\|\s]','', arc['DynamicComicName'].lower()).strip()]
logger.info('valids: ' + str(valids))
@ -2873,11 +2887,20 @@ class WebInterface(object):
else:
dfilename = tmpfc['comicfilename']
newVal = {"Status": "Downloaded",
"Location": dfilename} #tmpfc['ComicFilename']}
if all([tmpfc['sub'] is not None, tmpfc['sub'] != 'None']):
loc_path = os.path.join(tmpfc['ComicLocation'], tmpfc['sub'], dfilename)
else:
loc_path = os.path.join(tmpfc['ComicLocation'], dfilename)
newVal = {"Status": "Downloaded",
"Location": loc_path} #dfilename}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("readinglist", newVal, ctrlVal)
fn+=1
else:
newVal = {"Status": "Skipped"}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("readinglist", newVal, ctrlVal)
logger.fdebug("we matched on " + str(len(arc_match)) + " issues")
for m_arc in arc_match:
@ -2891,9 +2914,6 @@ class WebInterface(object):
if issue['Issue_Number'] == m_arc['match_issue']:
logger.fdebug("we matched on " + issue['Issue_Number'] + " for " + m_arc['match_name'])
if issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived' or issue['Status'] == 'Snatched':
ctrlVal = {"IssueArcID": m_arc['match_issuearcid']}
newVal = {"Status": issue['Status'],
"IssueID": issue['IssueID']}
if showonreadlist:
showctrlVal = {"IssueID": issue['IssueID']}
shownewVal = {"ComicName": issue['ComicName'],
@ -2903,10 +2923,14 @@ class WebInterface(object):
"ComicID": m_arc['match_id']}
myDB.upsert("readlist", shownewVal, showctrlVal)
myDB.upsert("readinglist",newVal,ctrlVal)
logger.fdebug("Already have " + issue['ComicName'] + " :# " + issue['Issue_Number'])
if issue['Status'] == 'Downloaded':
if issue['Location'] is not None:
issloc = os.path.join(m_arc['match_filedirectory'], issue['Location'])
else:
issloc = None
location_path = issloc
if issue['Status'] == 'Downloaded':
#check multiple destination directory usage here.
if not os.path.isfile(issloc):
if all([mylar.MULTIPLE_DEST_DIRS is not None, mylar.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(m_arc['match_filedirectory'])) != issloc, os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(m_arc['match_filedirectory'])))]):
@ -2916,9 +2940,9 @@ class WebInterface(object):
continue
logger.fdebug('source location set to : ' + issloc)
logger.fdebug('Destination location set to : ' + m_arc['destination_location'])
if mylar.COPY2ARCDIR:
if all([mylar.STORYARCDIR, mylar.COPY2ARCDIR]):
logger.fdebug('Destination location set to : ' + m_arc['destination_location'])
logger.fdebug('Attempting to copy into StoryArc directory')
#copy into StoryArc directory...
if mylar.READ2FILENAME:
@ -2937,9 +2961,20 @@ class WebInterface(object):
raise OSError
except (OSError, IOError):
logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + issloc + ' - check directories and manually re-run.')
continue
else:
logger.fdebug('Destination file exists: ' + dstloc)
location_path = dstloc
else:
location_path = issloc
ctrlVal = {"IssueArcID": m_arc['match_issuearcid']}
newVal = {'Status': issue['Status'],
'IssueID': issue['IssueID'],
'Location': location_path}
myDB.upsert("readinglist",newVal,ctrlVal)
else:
logger.fdebug("We don't have " + issue['ComicName'] + " :# " + issue['Issue_Number'])
ctrlVal = {"IssueArcID": m_arc['match_issuearcid']}

View File

@ -940,10 +940,10 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
annualidmatch = [x for x in weeklylist if week['comicid'] is not None and ([xa for xa in x['AnnualIDs'] if int(xa['ComicID']) == int(week['comicid'])])]
#The above will auto-match against ComicID if it's populated on the pullsite, otherwise do name-matching.
namematch = [ab for ab in weeklylist if ab['DynamicName'] == week['dynamicname']]
logger.info('rowid: ' + str(week['rowid']))
logger.info('idmatch: ' + str(idmatch))
logger.info('annualidmatch: ' + str(annualidmatch))
logger.info('namematch: ' + str(namematch))
#logger.fdebug('rowid: ' + str(week['rowid']))
#logger.fdebug('idmatch: ' + str(idmatch))
#logger.fdebug('annualidmatch: ' + str(annualidmatch))
#logger.fdebug('namematch: ' + str(namematch))
if any([idmatch,namematch,annualidmatch]):
if idmatch:
comicname = idmatch[0]['ComicName'].strip()
@ -1238,7 +1238,7 @@ def pull_the_file(newrl):
PULLURL = 'https://www.previewsworld.com/shipping/newreleases.txt'
PULL_AGENT = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246'}
try:
r = requests.get(PULLURL, verify=False, headers=PULL_AGENT, stream=True)
r = requests.get(PULLURL, verify=True, headers=PULL_AGENT, stream=True)
except requests.exceptions.RequestException as e:
logger.warn(e)
return False