FIX:(#1600) Folder volume naming now defaults to V1 when no volume is indicated, provided the volume tag ($VolumeN) is used in the folder format and the new SETDEFAULTVOLUME option is set to True in config.ini, FIX: Post-processing no longer locks up on some machines during story-arc post-processing, FIX: Corrected some incorrect CSS references that caused odd displays, FIX: 32P torrent feed entries without a torrent size no longer raise an error, IMP: Issues can now be added to or removed from a story arc directly within the arc detail page. Removed issues stay removed until the arc is deleted; issues are added by providing an IssueID and the reading-order number to assign to it. IMP: The reading-order number can now be changed by clicking directly on the reading-order column and entering a new sequence number (a screen refresh is still required afterward).
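A minimal config.ini sketch of the new option might look like the following (the folder_format value is only illustrative; setdefaultvolume is the key this commit adds, read as 0/1 under [General]):

    [General]
    folder_format = $Series $VolumeN ($Year)
    setdefaultvolume = 1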

This commit is contained in:
evilhero 2017-04-04 11:19:40 -04:00
parent 145df6e9f5
commit 2246aacc7d
9 changed files with 462 additions and 251 deletions

View File

@ -921,7 +921,7 @@ div#artistheader h2 a {
font-family: "Trebuchet MS", Helvetica, Arial, sans-serif;
}
#read_detail th#options {
max-width: 150px;
min-width: 150px;
text-align: left;
}
#read_detail th#comicname {
@ -933,7 +933,7 @@ div#artistheader h2 a {
text-align: center;
}
#read_detail th#status {
max-width: 80px;
max-width: 90px;
text-align: center;
}
#read_detail th#issuedate {
@ -957,7 +957,7 @@ div#artistheader h2 a {
vertical-align: middle;
}
#read_detail td#status {
max-width: 80px;
max-width: 90px;
text-align: center;
vertical-align: middle;
}
@ -967,7 +967,7 @@ div#artistheader h2 a {
vertical-align: middle;
}
#read_detail td#options {
max-width: 150px;
min-width: 150px;
text-align: left;
vertical-align: middle;
}
@ -1013,6 +1013,62 @@ div#artistheader h2 a {
text-align: center;
vertical-align: middle;
}
#arc_detail th#options {
min-width: 150px;
text-align: left;
}
#arc_detail th#comicname {
min-width: 290px;
text-align: left;
}
#arc_detail th#issue {
max-width: 15px;
text-align: center;
}
#arc_detail th#status {
max-width: 75px;
text-align: center;
}
#arc_detail th#issuedate {
max-width: 40px;
text-align: center;
}
#arc_detail th#readingorder {
max-width: 10px;
text-align: left;
vertical-align: middle;
}
#arc_detail td#comicname {
min-width: 290px;
text-align: left;
vertical-align: middle;
font-size: 12px;
}
#arc_detail td#issue {
max-width: 15px;
text-align: left;
vertical-align: middle;
}
#arc_detail td#status {
max-width: 75px;
text-align: center;
vertical-align: middle;
}
#arc_detail td#issuedate {
max-width: 40px;
text-align: center;
vertical-align: middle;
}
#arc_detail td#options {
min-width: 150px;
text-align: left;
vertical-align: middle;
}
#arc_detail td#readingorder {
max-width: 10px;
text-align: left;
vertical-align: middle;
}
#weekly_pull th#publisher {
min-width: 150px;
text-align: left;

View File

@ -68,7 +68,7 @@
</table>
</div>
</div>
<table class="display" id="read_detail">
<table class="display" id="arc_detail">
<thead>
<tr>
<th id="readingorder"></th>
@ -76,7 +76,7 @@
<th id="issue">Issue</th>
<th id="issuedate">Pub Date</th>
<th id="status">Status</th>
<th id="action">Options</th>
<th id="options">Options</th>
</tr>
</thead>
<tbody>
@ -111,15 +111,20 @@
else:
haveit = "No"
if all([item['Volume'] is not None, item['Volume'] != 'None']):
volume = 'V' + item['Volume']
else:
volume = ''
%>
<tr id="${item['ReadingOrder']}" class="grade${grade}">
<td id="readingorder">${item['ReadingOrder']}</td>
<td class="edit" title="Change the order (click to edit)" id="${storyarcid}.${item['IssueArcID']}">${item['ReadingOrder']}</td>
<td id="comicname" title="${item['IssueName']}">
%if haveit == "No":
${item['ComicName']} (${item['SeriesYear']})
${item['ComicName']} ${volume} (${item['SeriesYear']})
%else:
<a href="comicDetails?ComicID=${haveit}">${item['ComicName']} (${item['SeriesYear']})</a>
<a href="comicDetails?ComicID=${haveit}">${item['ComicName']} ${volume} (${item['SeriesYear']})</a>
%endif
</td>
@ -142,21 +147,36 @@
issuedate = '0000-00-00'
%>
<td id="issuedate">${issuedate}</td>
<td id="status">${item['Status']}</td>
<td id="action">
<td id="status">${item['Status']}
%if item['Status'] == 'Downloaded' or item['Status'] == 'Archived':
<a href="#" title="${item['Location']}"><img src="interfaces/default/images/info32.png" height="16" alt="" class="highqual" /></a>
%endif
</td>
<td id="options">
%if any([item['Status'] is None, item['Status'] == None, item['Status'] == 'Skipped']):
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab it</a>
<a title="Remove Issue from Story Arc" onclick="doAjaxCall('removefromreadlist?IssueArcID=${item['IssueArcID']}',$(this),'table')" data-success='Successfully deleted ${item['IssueArcID']}'><span class="ui-icon ui-icon-minus"></span>Remove it</a>
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a>
%elif item['Status'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueissue?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to Retry"><span class="ui-icon ui-icon-plus"></span>Retry</a>
%elif item['Status'] == 'Downloaded' and item['Location'] is not None:
<a href="downloadthis?pathfile=${item['Location'] |u}"><img src="interfaces/default/images/download_icon.png" height="25" width="25" title="Download the Issue" class="highqual" /></a>
<a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a>
%endif
<a href="#" title="Remove Issue from Story Arc" onclick="doAjaxCall('removefromreadlist?IssueArcID=${item['IssueArcID']}&manual=${item['Manual']}',$(this),'table')" data-success='Successfully deleted ${item['IssueArcID']}'><span class="ui-icon ui-icon-minus"></span>Remove</a>
</td>
</tr>
%endfor
</tbody>
</table>
<div style="position:relative; width:960px; height:10px; margin:10px auto;">
<form action="manual_arc_add" method="GET">
<input type="hidden" name="storyarcid" value=${storyarcid}>
<div style="position:absolute; top:30px; right:10px;">
<center><label><strong><a href="#" title="Enter the IssueID of the issue you want to add to the arc">Issue ID</a></strong></label>
<input type="text" name="manual_issueid" size="10">
<label><strong><a href="#" title="Enter the Reading Order # for the given issue">Reading Order</a></strong></label>
<input type="text" name="manual_readingorder" size="2"><input type="image" src="interfaces/default/images/submit.png" height="25" width="25" class="highqual" /></center>
</div>
</form>
</div>
</%def>
<%def name="headIncludes()">
@ -165,7 +185,19 @@
<%def name="javascriptIncludes()">
<script src="js/libs/jquery.dataTables.min.js"></script>
<script src="js/libs/jquery.jeditable.js"></script>
<script>
$(document).ready(function() {
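// in-place editing for the reading-order cells (class 'edit'): each cell's id is "<storyarcid>.<IssueArcID>" and the newly entered sequence number is submitted to the order_edit endpoint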
$('.edit').editable('order_edit', {
callback : function(value, settings) {
console.log(this);
console.log(value);
console.log(settings);
return(value);
}
});
});
</script>
<script type="text/javascript">
$("#menu_link_scan").click(function() {
$('#chkoptions').submit();
@ -178,7 +210,7 @@
$( "#tabs" ).tabs();
});
initActions();
$('#read_detail').dataTable(
$('#arc_detail').dataTable(
{
"bDestroy": true,
"oLanguage": {

View File

@ -923,6 +923,11 @@ class PostProcessor(object):
logger.fdebug(module + ' Issueid: ' + str(issueid))
sarc = nzbiss['SARC']
tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone()
comicid = None
comicname = None
issuenumber = None
if tmpiss is not None:
comicid = tmpiss['ComicID']
comicname = tmpiss['ComicName']
@ -986,17 +991,6 @@ class PostProcessor(object):
if sandwich is not None and 'S' in sandwich:
self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc))
arcdir = helpers.filesafe(sarc)
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", arcdir)
self._log("StoryArc Directory set to : " + storyarcd)
logger.info(module + ' Story Arc Directory set to : ' + storyarcd)
else:
self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR)
logger.info(module + ' Story Arc Directory set to : ' + mylar.GRABBAG_DIR)
else:
self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.")
logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.')
@ -1025,7 +1019,26 @@ class PostProcessor(object):
issuearcid = re.sub('S', '', issueid)
logger.fdebug(module + ' issuearcid:' + str(issuearcid))
arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone()
if arcdata is None:
logger.warn(module + ' Unable to locate issue within Story Arcs. Cannot post-process at this time - try to Refresh the Arc and manually post-process if necessary')
self._log('Unable to locate issue within Story Arcs in order to properly assign metadata. Post-processing aborted.')
self.valreturn.append({"self.log": self.log,
"mode": 'stop'})
return self.queue.put(self.valreturn)
if arcdata['Publisher'] is None:
arcpub = arcdata['IssuePublisher']
else:
arcpub = arcdata['Publisher']
grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub)
if comicid is None:
comicid = arcdata['ComicID']
if comicname is None:
comicname = arcdata['ComicName']
if issuenumber is None:
issuenumber = arcdata['IssueNumber']
issueid = arcdata['IssueID']
#tag the meta.
@ -1037,7 +1050,7 @@ class PostProcessor(object):
self._log("Metatagging enabled - proceeding...")
try:
import cmtagmylar
metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=ofilename)
metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename))
except ImportError:
logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/')
metaresponse = "fail"
@ -1060,16 +1073,16 @@ class PostProcessor(object):
dfilename = ofilename
if sandwich is not None and 'S' in sandwich:
if mylar.STORYARCDIR:
grdst = storyarcd
else:
grdst = mylar.DESTINATION_DIR
else:
if mylar.GRABBAG_DIR:
grdst = mylar.GRABBAG_DIR
else:
grdst = mylar.DESTINATION_DIR
#if sandwich is not None and 'S' in sandwich:
# if mylar.STORYARCDIR:
# grdst = storyarcd
# else:
# grdst = mylar.DESTINATION_DIR
#else:
# if mylar.GRABBAG_DIR:
# grdst = mylar.GRABBAG_DIR
# else:
# grdst = mylar.DESTINATION_DIR
checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module)
if not checkdirectory:

View File

@ -187,6 +187,7 @@ CORRECT_METADATA = False
MOVE_FILES = False
RENAME_FILES = False
FOLDER_FORMAT = None
SETDEFAULTVOLUME = False
FILE_FORMAT = None
REPLACE_SPACES = False
REPLACE_CHAR = None
@ -500,7 +501,7 @@ def initialize():
PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, BOXCAR_ENABLED, BOXCAR_ONSNATCH, BOXCAR_TOKEN, \
PUSHBULLET_ENABLED, PUSHBULLET_APIKEY, PUSHBULLET_DEVICEID, PUSHBULLET_ONSNATCH, LOCMOVE, NEWCOM_DIR, FFTONEWCOM_DIR, \
PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, \
FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, POST_PROCESSING_SCRIPT, \
FOLDER_FORMAT, SETDEFAULTVOLUME, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, POST_PROCESSING_SCRIPT, \
FILE_OPTS, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, SEND2READ, MAINTAINSERIESFOLDER, TAB_ENABLE, TAB_HOST, TAB_USER, TAB_PASS, TAB_DIRECTORY, \
STORYARCDIR, COPY2ARCDIR, ARC_FOLDERFORMAT, ARC_FILEOPS, CVURL, CV_VERIFY, CHECK_FOLDER, ENABLE_CHECK_FOLDER, \
COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, ALT_PULL, PULLBYFILE, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, \
@ -612,6 +613,7 @@ def initialize():
MOVE_FILES = bool(check_setting_int(CFG, 'General', 'move_files', 0))
RENAME_FILES = bool(check_setting_int(CFG, 'General', 'rename_files', 0))
FOLDER_FORMAT = check_setting_str(CFG, 'General', 'folder_format', '$Series ($Year)')
SETDEFAULTVOLUME = bool(check_setting_int(CFG, 'General', 'setdefaultvolume', 0))
FILE_FORMAT = check_setting_str(CFG, 'General', 'file_format', '$Series $Issue ($Year)')
USE_BLACKHOLE = bool(check_setting_int(CFG, 'General', 'use_blackhole', 0))
BLACKHOLE_DIR = check_setting_str(CFG, 'General', 'blackhole_dir', '')
@ -1446,6 +1448,7 @@ def config_write():
new_config['General']['move_files'] = int(MOVE_FILES)
new_config['General']['rename_files'] = int(RENAME_FILES)
new_config['General']['folder_format'] = FOLDER_FORMAT
new_config['General']['setdefaultvolume'] = int(SETDEFAULTVOLUME)
new_config['General']['file_format'] = FILE_FORMAT
#new_config['General']['use_blackhole'] = int(USE_BLACKHOLE)
new_config['General']['blackhole_dir'] = BLACKHOLE_DIR
@ -1756,7 +1759,7 @@ def dbcheck():
# c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS importresults (impID TEXT, ComicName TEXT, ComicYear TEXT, Status TEXT, ImportDate TEXT, ComicFilename TEXT, ComicLocation TEXT, WatchMatch TEXT, DisplayName TEXT, SRID TEXT, ComicID TEXT, IssueID TEXT, Volume TEXT, IssueNumber TEXT, DynamicName TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readlist (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Status TEXT, DateAdded TEXT, Location TEXT, inCacheDir TEXT, SeriesYear TEXT, ComicID TEXT, StatusChange TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT, DynamicComicName TEXT, Volume TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT, DynamicComicName TEXT, Volume TEXT, Manual TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS annuals (IssueID TEXT, Issue_Number TEXT, IssueName TEXT, IssueDate TEXT, Status TEXT, ComicID TEXT, GCDComicID TEXT, Location TEXT, ComicSize TEXT, Int_IssueNumber INT, ComicName TEXT, ReleaseDate TEXT, ReleaseComicID TEXT, ReleaseComicName TEXT, IssueDate_Edit TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS rssdb (Title TEXT UNIQUE, Link TEXT, Pubdate TEXT, Site TEXT, Size TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS futureupcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Publisher TEXT, Status TEXT, DisplayComicName TEXT, weeknumber TEXT, year TEXT)')
@ -2195,6 +2198,11 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN Volume TEXT')
try:
c.execute('SELECT Manual from readinglist')
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN Manual TEXT')
## -- searchresults Table --
try:
c.execute('SELECT SRID from searchresults')

View File

@ -115,11 +115,14 @@ def getComic(comicid, type, issueid=None, arc=None, arcid=None, arclist=None, co
id = arcid
#since the arclist holds the issueids, and the pertinent reading order - we need to strip out the reading order so this works.
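#e.g. (illustrative values) an arclist of '12345,1|67890,2' is reduced to '12345|67890'; a leading 'M' flags a single, manually-added issueid that needs no stripping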
aclist = ''
for ac in arclist.split('|'):
aclist += ac[:ac.find(',')] + '|'
if aclist.endswith('|'):
aclist = aclist[:-1]
islist = aclist
if arclist.startswith('M'):
islist = arclist[1:]
else:
for ac in arclist.split('|'):
aclist += ac[:ac.find(',')] + '|'
if aclist.endswith('|'):
aclist = aclist[:-1]
islist = aclist
else:
id = comicid
islist = None

View File

@ -1792,6 +1792,142 @@ def listStoryArcs():
library[row['CV_ArcID']] = row['CV_ArcID']
return library
def manualArc(issueid, reading_order, storyarcid):
import db
if issueid.startswith('4000-'):
issueid = issueid[5:]
myDB = db.DBConnection()
arc_chk = myDB.select("SELECT * FROM readinglist WHERE StoryArcID=? AND NOT Manual is 'deleted'", [storyarcid])
storyarcname = arc_chk[0]['StoryArc']
storyarcissues = arc_chk[0]['TotalIssues']
iss_arcids = []
for issarc in arc_chk:
iss_arcids.append({"IssueArcID": issarc['IssueArcID'],
"IssueID": issarc['IssueID'],
"Manual": issarc['Manual'],
"ReadingOrder": issarc['ReadingOrder']})
arc_results = mylar.cv.getComic(comicid=None, type='issue', issueid=None, arcid=storyarcid, arclist='M' + str(issueid))
arcval = arc_results['issuechoice'][0]
comicname = arcval['ComicName']
st_d = mylar.filechecker.FileChecker(watchcomic=comicname)
st_dyninfo = st_d.dynamic_replace(comicname)
dynamic_name = re.sub('[\|\s]','', st_dyninfo['mod_seriesname'].lower()).strip()
issname = arcval['Issue_Name']
issid = str(arcval['IssueID'])
comicid = str(arcval['ComicID'])
cidlist = str(comicid)
st_issueid = None
manual_mod = 'added'
new_readorder = []
for aid in iss_arcids:
if aid['IssueID'] == issid:
logger.info('Issue already exists for storyarc [IssueArcID:' + aid['IssueArcID'] + '][Manual:' + str(aid['Manual']) + ']')
st_issueid = aid['IssueArcID']
manual_mod = aid['Manual']
if reading_order is None:
#if no reading order is given, drop in the last spot.
reading_order = len(iss_arcids) + 1
if int(aid['ReadingOrder']) >= int(reading_order):
reading_seq = int(aid['ReadingOrder']) + 1
else:
reading_seq = int(aid['ReadingOrder'])
new_readorder.append({'IssueArcID': aid['IssueArcID'],
'IssueID': aid['IssueID'],
'ReadingOrder': reading_seq})
import random
if st_issueid is None:
st_issueid = str(storyarcid) + "_" + str(random.randint(1000,9999))
issnum = arcval['Issue_Number']
issdate = str(arcval['Issue_Date'])
storedate = str(arcval['Store_Date'])
int_issnum = issuedigits(issnum)
comicid_results = mylar.cv.getComic(comicid=None, type='comicyears', comicidlist=cidlist)
seriesYear = 'None'
issuePublisher = 'None'
seriesVolume = 'None'
if issname is None:
IssueName = 'None'
else:
IssueName = issname[:70]
for cid in comicid_results:
if cid['ComicID'] == comicid:
seriesYear = cid['SeriesYear']
issuePublisher = cid['Publisher']
seriesVolume = cid['Volume']
#assume that the arc is the same
storyarcpublisher = issuePublisher
break
newCtrl = {"IssueID": issid,
"StoryArcID": storyarcid}
newVals = {"ComicID": comicid,
"IssueArcID": st_issueid,
"StoryArc": storyarcname,
"ComicName": comicname,
"Volume": seriesVolume,
"DynamicComicName": dynamic_name,
"IssueName": IssueName,
"IssueNumber": issnum,
"Publisher": storyarcpublisher,
"TotalIssues": str(int(storyarcissues) +1),
"ReadingOrder": int(reading_order), #arbitrarily set it to the last reading order sequence # just to see if it works.
"IssueDate": issdate,
"StoreDate": storedate,
"SeriesYear": seriesYear,
"IssuePublisher": issuePublisher,
"CV_ArcID": storyarcid,
"Int_IssueNumber": int_issnum,
"Manual": manual_mod}
myDB.upsert("readinglist", newVals, newCtrl)
#now we resequence the reading-order to accommodate the change.
logger.info('Adding the new issue into the reading order & resequencing the order to make sure there are no sequence drops...')
new_readorder.append({'IssueArcID': st_issueid,
'IssueID': issid,
'ReadingOrder': int(reading_order)})
newrl = 0
for rl in sorted(new_readorder, key=itemgetter('ReadingOrder'), reverse=False):
if rl['ReadingOrder'] - 1 != newrl:
rorder = newrl + 1
logger.fdebug(rl['IssueID'] + ' - changing reading order seq to : ' + str(rorder))
else:
rorder = rl['ReadingOrder']
logger.fdebug(rl['IssueID'] + ' - setting reading order seq to : ' + str(rorder))
rl_ctrl = {"IssueID": rl['IssueID'],
"IssueArcID": rl['IssueArcID'],
"StoryArcID": storyarcid}
r1_new = {"ReadingOrder": rorder}
newrl = rorder
myDB.upsert("readinglist", r1_new, rl_ctrl)
#check to see if the issue exists already so we can set the status right away.
iss_chk = myDB.selectone('SELECT * FROM issues where issueid = ?', [issueid]).fetchone()
if iss_chk is None:
logger.info('Issue is not currently in your watchlist. Setting status to Skipped')
status_change = 'Skipped'
else:
status_change = iss_chk['Status']
logger.info('Issue currently exists in your watchlist. Setting status to ' + status_change)
myDB.upsert("readinglist", {'Status': status_change}, newCtrl)
return
def listIssues(weeknumber, year):
import db
library = []
@ -2297,41 +2433,44 @@ def updatearc_locs(storyarcid, issues):
arcpub = arcinfo['Publisher']
grdst = arcformat(arcinfo['StoryArc'], spantheyears(arcinfo['StoryArcID']), arcpub)
logger.info('grdst:' + grdst)
if grdst is not None:
logger.info('grdst:' + grdst)
#send to renamer here if valid.
dfilename = chk['Location']
if mylar.RENAME_FILES:
renamed_file = rename_param(arcinfo['ComicID'], arcinfo['ComicName'], arcinfo['IssueNumber'], chk['Location'], issueid=arcinfo['IssueID'], arc=arcinfo['StoryArc'])
if renamed_file:
dfilename = renamed_file['nfilename']
#send to renamer here if valid.
dfilename = chk['Location']
if mylar.RENAME_FILES:
renamed_file = rename_param(arcinfo['ComicID'], arcinfo['ComicName'], arcinfo['IssueNumber'], chk['Location'], issueid=arcinfo['IssueID'], arc=arcinfo['StoryArc'])
if renamed_file:
dfilename = renamed_file['nfilename']
if mylar.READ2FILENAME:
#logger.fdebug('readingorder#: ' + str(arcinfo['ReadingOrder']))
#if int(arcinfo['ReadingOrder']) < 10: readord = "00" + str(arcinfo['ReadingOrder'])
#elif int(arcinfo['ReadingOrder']) >= 10 and int(arcinfo['ReadingOrder']) <= 99: readord = "0" + str(arcinfo['ReadingOrder'])
#else: readord = str(arcinfo['ReadingOrder'])
readord = renamefile_readingorder(arcinfo['ReadingOrder'])
dfilename = str(readord) + "-" + dfilename
if mylar.READ2FILENAME:
#logger.fdebug('readingorder#: ' + str(arcinfo['ReadingOrder']))
#if int(arcinfo['ReadingOrder']) < 10: readord = "00" + str(arcinfo['ReadingOrder'])
#elif int(arcinfo['ReadingOrder']) >= 10 and int(arcinfo['ReadingOrder']) <= 99: readord = "0" + str(arcinfo['ReadingOrder'])
#else: readord = str(arcinfo['ReadingOrder'])
readord = renamefile_readingorder(arcinfo['ReadingOrder'])
dfilename = str(readord) + "-" + dfilename
pathdst = os.path.join(grdst, dfilename)
pathdst = os.path.join(grdst, dfilename)
logger.fdebug('Destination Path : ' + pathdst)
logger.fdebug('Source Path : ' + pathsrc)
if not os.path.isfile(pathdst):
logger.info('[' + mylar.ARC_FILEOPS.upper() + '] ' + pathsrc + ' into directory : ' + pathdst)
logger.fdebug('Destination Path : ' + pathdst)
logger.fdebug('Source Path : ' + pathsrc)
if not os.path.isfile(pathdst):
logger.info('[' + mylar.ARC_FILEOPS.upper() + '] ' + pathsrc + ' into directory : ' + pathdst)
try:
#need to ensure that src is pointing to the series in order to do a soft/hard-link properly
fileoperation = file_ops(pathsrc, pathdst, arc=True)
if not fileoperation:
raise OSError
except (OSError, IOError):
logger.fdebug('[' + mylar.ARC_FILEOPS.upper() + '] Failure ' + pathsrc + ' - check directories and manually re-run.')
continue
try:
#need to ensure that src is pointing to the series in order to do a soft/hard-link properly
fileoperation = file_ops(pathsrc, pathdst, arc=True)
if not fileoperation:
raise OSError
except (OSError, IOError):
logger.fdebug('[' + mylar.ARC_FILEOPS.upper() + '] Failure ' + pathsrc + ' - check directories and manually re-run.')
continue
updateloc = pathdst
else:
updateloc = pathsrc
update_iss.append({'IssueID': chk['IssueID'],
'Location': pathdst})
'Location': updateloc})
for ui in update_iss:
logger.info(ui['IssueID'] + ' to update location to: ' + ui['Location'])

View File

@ -177,157 +177,28 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
#since the weekly issue check could return either annuals or issues, let's initialize it here so it carries through properly.
weeklyissue_check = []
# #let's do the Annual check here.
# if mylar.ANNUALS_ON:
# #we need to check first to see if there are pre-existing annuals that have been manually added, or else they'll get
# #wiped out.
# annualids = [] #to be used to make sure an ID isn't double-loaded
#
# if annload is None:
# pass
# else:
# for manchk in annload:
# if manchk['ReleaseComicID'] is not None or manchk['ReleaseComicID'] is not None: #if it exists, then it's a pre-existing add.
# #print str(manchk['ReleaseComicID']), comic['ComicName'], str(SeriesYear), str(comicid)
# manualAnnual(manchk['ReleaseComicID'], comic['ComicName'], SeriesYear, comicid)
# annualids.append(manchk['ReleaseComicID'])
#
# annualcomicname = re.sub('[\,\:]', '', comic['ComicName'])
#
##----- CBDB (outdated)
## annuals = comicbookdb.cbdb(annualcomicname, SeriesYear)
## print ("Number of Annuals returned: " + str(annuals['totalissues']))
## nb = 0
## while (nb <= int(annuals['totalissues'])):
## try:
## annualval = annuals['annualslist'][nb]
## except IndexError:
## break
##----
# #this issueid doesn't exist at this point since we got the data from cbdb...let's try and figure out
# #the issueID for CV based on what we know so we can use that ID (and thereby the metadata too)
#
# #other inherit issue - results below will return the ID for the Series of Annuals, not the series itself.
# #sr['comicid'] not the same as comicid for series.
# annComicName = annualcomicname + ' annual'
# mode = 'series'
# #if annuals['totalissues'] is None:
# # annissues = 0
# #else:
# # annissues = annuals['totalissues']
# #print "annissues :" + str(annissues)
#
# # annuals happen once / year. determine how many.
# annualyear = SeriesYear # no matter what, the year won't be less than this.
# #if annualval['AnnualYear'] is None:
# # sresults = mb.findComic(annComicName, mode, issue=annissues)
# #else:
# #sresults = mb.findComic(annComicName, mode, issue=annissues, limityear=annualval['AnnualYear'])
# #print "annualyear: " + str(annualval['AnnualYear'])
# annual_types_ignore = {'paperback', 'collecting', 'reprints', 'collected', 'print edition', 'tpb', 'available in print', 'collects'}
#
# logger.fdebug('[IMPORTER-ANNUAL] - Annual Year:' + str(annualyear))
# sresults, explicit = mb.findComic(annComicName, mode, issue=None, explicit='all')#,explicit=True)
# type='comic'
#
# if len(sresults) == 1:
# logger.fdebug('[IMPORTER-ANNUAL] - 1 result')
# if len(sresults) > 0:
# logger.fdebug('[IMPORTER-ANNUAL] - there are ' + str(len(sresults)) + ' results.')
# num_res = 0
# while (num_res < len(sresults)):
# sr = sresults[num_res]
# #logger.fdebug("description:" + sr['description'])
# if any(x in sr['description'].lower() for x in annual_types_ignore):
# logger.fdebug('[IMPORTER-ANNUAL] - tradeback/collected edition detected - skipping ' + str(sr['comicid']))
# else:
# if comicid in sr['description']:
# logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
# issueid = sr['comicid']
# logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' added to series list as an Annual')
# if issueid in annualids:
# logger.fdebug('[IMPORTER-ANNUAL] - ' + str(issueid) + ' already exists & was refreshed.')
# num_res+=1 # need to manually increment since not a for-next loop
# continue
# issued = cv.getComic(issueid, 'issue')
# if len(issued) is None or len(issued) == 0:
# logger.fdebug('[IMPORTER-ANNUAL] - Could not find any annual information...')
# pass
# else:
# n = 0
# if int(sr['issues']) == 0 and len(issued['issuechoice']) == 1:
# sr_issues = 1
# else:
# sr_issues = sr['issues']
# logger.fdebug('[IMPORTER-ANNUAL (MAIN)] - There are ' + str(sr_issues) + ' annuals in this series.')
# while (n < int(sr_issues)):
# try:
# firstval = issued['issuechoice'][n]
# except IndexError:
# break
# try:
# cleanname = helpers.cleanName(firstval['Issue_Name'])
# except:
# cleanname = 'None'
# issid = str(firstval['Issue_ID'])
# issnum = str(firstval['Issue_Number'])
# issname = cleanname
# issdate = str(firstval['Issue_Date'])
# stdate = str(firstval['Store_Date'])
# int_issnum = helpers.issuedigits(issnum)
# newCtrl = {"IssueID": issid}
# newVals = {"Issue_Number": issnum,
# "Int_IssueNumber": int_issnum,
# "IssueDate": issdate,
# "ReleaseDate": stdate,
# "IssueName": issname,
# "ComicID": comicid,
# "ComicName": comic['ComicName'],
# "ReleaseComicID": re.sub('4050-', '', firstval['Comic_ID']).strip(),
# "ReleaseComicName": sr['name'],
# "Status": "Skipped"}
# myDB.upsert("annuals", newVals, newCtrl)
#
# if issuechk is not None and issuetype == 'annual':
# logger.fdebug('[IMPORTER-ANNUAL] - Comparing annual ' + str(issuechk) + ' .. to .. ' + str(int_issnum))
# if issuechk == int_issnum:
# weeklyissue_check.append({"Int_IssueNumber": int_issnum,
# "Issue_Number": issnum,
# "IssueDate": issdate,
# "ReleaseDate": stdate})
#
# n+=1
# num_res+=1
#
# elif len(sresults) == 0 or len(sresults) is None:
# logger.fdebug('[IMPORTER-ANNUAL] - No results, removing the year from the agenda and re-querying.')
# sresults, explicit = mb.findComic(annComicName, mode, issue=None)#, explicit=True)
# if len(sresults) == 1:
# sr = sresults[0]
# logger.fdebug('[IMPORTER-ANNUAL] - ' + str(comicid) + ' found. Assuming it is part of the greater collection.')
# else:
# resultset = 0
# else:
# logger.fdebug('[IMPORTER-ANNUAL] - Returning results to screen - more than one possibility')
# for sr in sresults:
# if annualyear < sr['comicyear']:
# logger.fdebug('[IMPORTER-ANNUAL] - ' + str(annualyear) + ' is less than ' + str(sr['comicyear']))
# if int(sr['issues']) > (2013 - int(sr['comicyear'])):
# logger.fdebug('[IMPORTER-ANNUAL] - Issue count is wrong')
#
# #newCtrl = {"IssueID": issueid}
# #newVals = {"Issue_Number": annualval['AnnualIssue'],
# # "IssueDate": annualval['AnnualDate'],
# # "IssueName": annualval['AnnualTitle'],
# # "ComicID": comicid,
# # "Status": "Skipped"}
# #myDB.upsert("annuals", newVals, newCtrl)
# #nb+=1
if any([oldcomversion is None, oldcomversion == "None"]):
logger.info('Previous version detected as None - seeing if update required')
if comic['ComicVersion'].isdigit():
comicVol = 'v' + comic['ComicVersion']
logger.info('Updated version to :' + str(comicVol))
if all([mylar.SETDEFAULTVOLUME is False, comicVol == 'v1']):
comicVol = None
else:
if mylar.SETDEFAULTVOLUME is True:
comicVol = 'v1'
else:
comicVol = None
else:
comicVol = oldcomversion
if all([mylar.SETDEFAULTVOLUME is True, comicVol is None]):
comicVol = 'v1'
else:
comicVol = None
#parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear)
#comic book location on machine
# setup default location here
u_comicnm = comic['ComicName']
# let's remove the non-standard characters here that will break filenaming / searching.
comicname_filesafe = helpers.filesafe(u_comicnm)
@ -338,11 +209,10 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
publisher = re.sub('!', '', comic['ComicPublisher']) # thanks Boom!
publisher = helpers.filesafe(publisher)
year = SeriesYear
comversion = comic['ComicVersion']
if comversion is None:
comversion = 'None'
if comicVol is None:
comicVol = 'None'
#if comversion is None, remove it so it doesn't populate with 'None'
if comversion == 'None':
if comicVol == 'None':
chunk_f_f = re.sub('\$VolumeN', '', mylar.FOLDER_FORMAT)
chunk_f = re.compile(r'\s+')
chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
@ -359,7 +229,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
'$series': series.lower(),
'$publisher': publisher.lower(),
'$VolumeY': 'V' + str(year),
'$VolumeN': comversion,
'$VolumeN': comicVol.upper(),
'$Annual': 'Annual'
}
@ -426,7 +296,6 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
logger.info('Attempting to retrieve the comic image for series')
try:
r = requests.get(comic['ComicImage'], params=None, stream=True, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
except Exception, e:
logger.warn('Unable to download image from CV URL link: ' + comic['ComicImage'] + ' [Status Code returned: ' + str(r.status_code) + ']')
@ -463,7 +332,6 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
logger.info('Attempting to retrieve alternate comic image for the series.')
try:
r = requests.get(comic['ComicImageALT'], params=None, stream=True, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
except Exception, e:
logger.warn('Unable to download image from CV URL link: ' + comic['ComicImageALT'] + ' [Status Code returned: ' + str(r.status_code) + ']')
@ -500,16 +368,6 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
except IOError as e:
logger.error('Unable to save cover (' + str(coverfile) + ') into series directory (' + str(comiclocal) + ') at this time.')
if oldcomversion is None or oldcomversion == "None":
logger.info('previous version detected as None - seeing if update required')
if comic['ComicVersion'].isdigit():
comicVol = "v" + comic['ComicVersion']
logger.info('updated version to :' + str(comicVol))
else:
comicVol = None
else:
comicVol = oldcomversion
#for description ...
#Cdesc = helpers.cleanhtml(comic['ComicDescription'])
#cdes_find = Cdesc.find("Collected")

View File

@ -269,7 +269,11 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
issue = feedme.entries[i].title[iss_st +3:].strip()
#logger.fdebug('issue # : ' + str(issue))
justdigits = feedme.entries[i].torrent_contentlength
try:
justdigits = feedme.entries[i].torrent_contentlength
except:
justdigits = '0'
seeddigits = 0
if int(mylar.MINSEEDS) >= int(seeddigits):

View File

@ -419,7 +419,8 @@ class WebInterface(object):
iss_arcids = []
for issarc in arc_chk:
iss_arcids.append({"IssueArcID": issarc['IssueArcID'],
"IssueID": issarc['IssueID']})
"IssueID": issarc['IssueID'],
"Manual": issarc['Manual']})
arcinfo = mb.storyarcinfo(cvarcid)
if len(arcinfo) > 1:
arclist = arcinfo['arclist']
@ -465,10 +466,14 @@ class WebInterface(object):
cidlist += '|' + str(comicid)
#don't recreate the st_issueid if it's a refresh and the issueid already exists (will create duplicates otherwise)
st_issueid = None
manual_mod = None
if arcrefresh:
for aid in iss_arcids:
if aid['IssueID'] == issid:
st_issueid = aid['IssueArcID']
manual_mod = aid['Manual']
break
if st_issueid is None:
st_issueid = str(storyarcid) + "_" + str(random.randint(1000,9999))
issnum = arcval['Issue_Number']
@ -503,7 +508,8 @@ class WebInterface(object):
"IssueDate": issdate,
"ReleaseDate": storedate,
"ReadingOrder": readingorder, #n +1,
"Int_IssueNumber": int_issnum})
"Int_IssueNumber": int_issnum,
"Manual": manual_mod})
n+=1
comicid_results = mylar.cv.getComic(comicid=None, type='comicyears', comicidlist=cidlist)
@ -547,7 +553,8 @@ class WebInterface(object):
"SeriesYear": seriesYear,
"IssuePublisher": issuePublisher,
"CV_ArcID": arcid,
"Int_IssueNumber": AD['Int_IssueNumber']}
"Int_IssueNumber": AD['Int_IssueNumber'],
"Manual": AD['Manual']}
myDB.upsert("readinglist", newVals, newCtrl)
@ -2369,7 +2376,7 @@ class WebInterface(object):
arclist = []
alist = myDB.select("SELECT * from readinglist WHERE ComicName is not Null group by StoryArcID") #COLLATE NOCASE")
for al in alist:
totalissues = myDB.select("SELECT COUNT(*) as count from readinglist WHERE StoryARcID=?", [al['StoryArcID']])
totalissues = myDB.select("SELECT COUNT(*) as count from readinglist WHERE StoryARcID=? AND NOT Manual is 'deleted'", [al['StoryArcID']])
havecnt = myDB.select("SELECT COUNT(*) as count FROM readinglist WHERE StoryArcID=? AND (Status='Downloaded' or Status='Archived')", [al['StoryArcID']])
havearc = havecnt[0][0]
@ -2399,12 +2406,14 @@ class WebInterface(object):
return serve_template(templatename="storyarc.html", title="Story Arcs", arclist=arclist, delete_type=0)
storyarc_main.exposed = True
def detailStoryArc(self, StoryArcID, StoryArcName):
def detailStoryArc(self, StoryArcID, StoryArcName=None):
myDB = db.DBConnection()
arcinfo = myDB.select("SELECT * from readinglist WHERE StoryArcID=? order by ReadingOrder ASC", [StoryArcID])
arcinfo = myDB.select("SELECT * from readinglist WHERE StoryArcID=? and NOT Manual IS 'deleted' order by ReadingOrder ASC", [StoryArcID])
try:
cvarcid = arcinfo[0]['CV_ArcID']
arcpub = arcinfo[0]['Publisher']
if StoryArcName is None:
StoryArcName = arcinfo[0]['StoryArc']
lowyear = 9999
maxyear = 0
issref = []
@ -2443,13 +2452,95 @@ class WebInterface(object):
sdir = mylar.GRABBAG_DIR
if len(issref) > 0:
logger.info(issref)
helpers.updatearc_locs(StoryArcID, issref)
arcinfo = myDB.select("SELECT * from readinglist WHERE StoryArcID=? order by ReadingOrder ASC", [StoryArcID])
arcinfo = myDB.select("SELECT * from readinglist WHERE StoryArcID=? AND NOT Manual IS 'deleted' order by ReadingOrder ASC", [StoryArcID])
return serve_template(templatename="storyarc_detail.html", title="Detailed Arc list", readlist=arcinfo, storyarcname=StoryArcName, storyarcid=StoryArcID, cvarcid=cvarcid, sdir=sdir)
detailStoryArc.exposed = True
def order_edit(self, id, value):
storyarcid = id[:id.find('.')]
issuearcid = id[id.find('.') +1:]
readingorder = value
#readingorder = value
valid_readingorder = None
#validate input here for reading order.
try:
if int(readingorder) > 0:
valid_readingorder = int(readingorder)
except ValueError:
logger.error('Non-Numeric/Negative readingorder submitted. Rejecting due to sequencing error.')
return
if valid_readingorder is None:
logger.error('invalid readingorder supplied. Rejecting due to sequencing error')
return
myDB = db.DBConnection()
readchk = myDB.select("SELECT * FROM readinglist WHERE StoryArcID=? AND NOT Manual is 'deleted' ORDER BY ReadingOrder", [storyarcid])
if readchk is None:
logger.error('Cannot edit this for some reason (Cannot locate Storyarc) - something is wrong.')
return
new_readorder = []
for rc in readchk:
if issuearcid == rc['IssueArcID']:
oldreadorder = int(rc['ReadingOrder'])
new_readorder.append({'IssueArcID': issuearcid,
'IssueID': rc['IssueID'],
'ReadingOrder': valid_readingorder})
else:
if int(rc['ReadingOrder']) >= valid_readingorder:
reading_seq = int(rc['ReadingOrder']) + 1
else:
reading_seq = int(rc['ReadingOrder']) - 1
if reading_seq == 0:
reading_seq = 1
new_readorder.append({'IssueArcID': rc['IssueArcID'],
'IssueID': rc['IssueID'],
'ReadingOrder': reading_seq})
#we resequence in the following way:
# everything before the new reading number stays the same
# everything after the new reading order gets incremented
# add in the new reading order at the desired sequence
# check for empty spaces (missing numbers in sequence) and fill them in.
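# e.g. (hypothetical IssueArcIDs A-D): orders [A:1, B:2, C:3, D:4] with C moved to order 2
# become [A:1, C:2, B:3, D:5] after the shift above, and the gap-fill loop below
# closes the hole left at 4 so the final sequence is [A:1, C:2, B:3, D:4]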
logger.fdebug(new_readorder)
newrl = 0
for rl in sorted(new_readorder, key=itemgetter('ReadingOrder'), reverse=False):
if rl['ReadingOrder'] - 1 != newrl:
rorder = newrl + 1
logger.fdebug(rl['IssueID'] + ' - changing reading order seq to : ' + str(rorder))
else:
rorder = rl['ReadingOrder']
logger.fdebug(rl['IssueID'] + ' - setting reading order seq to : ' + str(rorder))
rl_ctrl = {"IssueID": rl['IssueID'],
"IssueArcID": rl['IssueArcID'],
"StoryArcID": storyarcid}
r1_new = {"ReadingOrder": rorder}
newrl = rorder
myDB.upsert("readinglist", r1_new, rl_ctrl)
logger.info('Updated reading order for IssueArcID ' + str(issuearcid))
return value
order_edit.exposed = True
def manual_arc_add(self, manual_issueid, manual_readingorder, storyarcid, x=None, y=None):
logger.fdebug('IssueID to be attached : ' + str(manual_issueid))
logger.fdebug('StoryArcID : ' + str(storyarcid))
logger.fdebug('Reading Order # : ' + str(manual_readingorder))
threading.Thread(target=helpers.manualArc, args=[manual_issueid, manual_readingorder, storyarcid]).start()
raise cherrypy.HTTPRedirect("detailStoryArc?StoryArcID=%s" % storyarcid)
manual_arc_add.exposed = True
def markreads(self, action=None, **args):
sendtablet_queue = []
myDB = db.DBConnection()
@ -2488,7 +2579,7 @@ class WebInterface(object):
markreads.exposed = True
def removefromreadlist(self, IssueID=None, StoryArcID=None, IssueArcID=None, AllRead=None, ArcName=None, delete_type=None):
def removefromreadlist(self, IssueID=None, StoryArcID=None, IssueArcID=None, AllRead=None, ArcName=None, delete_type=None, manual=None):
myDB = db.DBConnection()
if IssueID:
myDB.action('DELETE from readlist WHERE IssueID=?', [IssueID])
@ -2508,7 +2599,11 @@ class WebInterface(object):
myDB.action('DELETE from nzblog WHERE IssueID LIKE ?', [stid])
logger.info("[DELETE-ARC] Removed " + str(StoryArcID) + " from Story Arcs.")
elif IssueArcID:
myDB.action('DELETE from readinglist WHERE IssueArcID=?', [IssueArcID])
if manual == 'added':
myDB.action('DELETE from readinglist WHERE IssueArcID=?', [IssueArcID])
else:
myDB.upsert("readinglist", {"Manual": 'deleted'}, {"IssueArcID": IssueArcID})
#myDB.action('DELETE from readinglist WHERE IssueArcID=?', [IssueArcID])
logger.info("[DELETE-ARC] Removed " + str(IssueArcID) + " from the Story Arc.")
elif AllRead:
myDB.action("DELETE from readlist WHERE Status='Read'")
@ -2780,6 +2875,9 @@ class WebInterface(object):
sarc_title = None
showonreadlist = 1 # 0 won't show storyarcissues on readinglist main page, 1 will show
for arc in ArcWatch:
if arc['Manual'] == 'deleted':
continue
sarc_title = arc['StoryArc']
logger.fdebug('[' + arc['StoryArc'] + '] ' + arc['ComicName'] + ' : ' + arc['IssueNumber'])