1
0
Fork 0
mirror of https://github.com/evilhero/mylar synced 2024-12-23 00:02:38 +00:00

IMP: Added a 'Download' button to the Weekly Pull-list that will download all comics marked 'Downloaded' on the pull-list into either the grabbag_dir or a weekly directory

This commit is contained in:
evilhero 2013-05-04 00:40:32 -04:00
parent c3e305118f
commit 19eee4d913
3 changed files with 59 additions and 12 deletions

View file

@ -1,7 +1,9 @@
<%inherit file="base.html"/>
<%!
from mylar import db
import mylar
from mylar import db
from mylar.helpers import checked
%>
<%def name="headerIncludes()">
@ -9,7 +11,8 @@
<div id="subhead_menu">
<a id="menu_link_refresh" href="manualpull">Refresh Pull-list</a>
<a id="menu_link_delete" href="pullrecreate">Recreate Pull-list</a>
<a id="menu_link_refresh" href="MassWeeklyDownload">Downloaded to Dir</a>
<a id="menu_link_scan" class="button">Download</a>
<!-- <a id="menu_link_refresh" onclick="doAjaxCall('MassWeeklyDownload?pulldate=${pulldate}, $(this)),'table'" href="#" data-success="Now Downloading Comics to : ${mylar.GRABBAG_DIR}">Download.</a> -->
</div>
</div>
<a href="home" class="back">&laquo; Back to overview</a>
@ -20,6 +23,20 @@
<h1>Weekly Pull list for : ${pulldate}</h1>
</div>
<div>
<form action="MassWeeklyDownload" method="GET" id="MassDownload">
<fieldset>
<div class="row">
<input type="checkbox" name="weekfolder" id="weekfolder" value="1" ${checked(mylar.WEEKFOLDER)} /><label>Store in Weekly Directory</label>
<small>Create ${weekfold}</small>
</div>
<input type="hidden" name="pulldate" value=${pulldate}>
<input type="submit" style="display:none" />
</fieldset>
</form>
</div>
<div class="table_wrapper">
<table class="display" id="pull_table">
@ -68,7 +85,13 @@
<%def name="javascriptIncludes()">
<script src="js/libs/jquery.dataTables.min.js"></script>
<script type="text/javascript">
$("#menu_link_scan").click(function() {
$('#MassDownload').submit();
return true;
});
</script>
<script>
function initThisPage() {

View file

@ -208,6 +208,7 @@ HIGHCOUNT = 0
READ2FILENAME = 0
CVAPIFIX = 0
CVURL = None
WEEKFOLDER = 0
def CheckSection(sec):
""" Check if INI section exists, if not create it """
@ -269,7 +270,7 @@ def initialize():
RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, \
PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, CVURL, CVAPIFIX, \
COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER
COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER, WEEKFOLDER
if __INITIALIZED__:
return False
@ -346,6 +347,7 @@ def initialize():
if not GRABBAG_DIR:
#default to ComicLocation
GRABBAG_DIR = DESTINATION_DIR
WEEKFOLDER = bool(check_setting_int(CFG, 'General', 'weekfolder', 0))
CVAPIFIX = bool(check_setting_int(CFG, 'General', 'cvapifix', 0))
if CVAPIFIX is None:
CVAPIFIX = 0
@ -709,7 +711,7 @@ def config_write():
new_config['General']['enable_pre_scripts'] = int(ENABLE_PRE_SCRIPTS)
new_config['General']['pre_scripts'] = PRE_SCRIPTS
new_config['General']['post_processing'] = int(POST_PROCESSING)
new_config['General']['weekfolder'] = int(WEEKFOLDER)
new_config['SABnzbd'] = {}
new_config['SABnzbd']['use_sabnzbd'] = int(USE_SABNZBD)

View file

@ -576,7 +576,8 @@ class WebInterface(object):
#raise cherrypy.HTTPRedirect("home")
else:
return self.manualpull()
return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pulldate=pulldate['SHIPDATE'], pullfilter=True)
weekfold = os.path.join(mylar.DESTINATION_DIR, pulldate['SHIPDATE'])
return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pulldate=pulldate['SHIPDATE'], pullfilter=True, weekfold=weekfold)
pullist.exposed = True
def filterpull(self):
@ -1123,26 +1124,48 @@ class WebInterface(object):
downloadLocal.exposed = True
def MassWeeklyDownload(self):
def MassWeeklyDownload(self, pulldate, weekfolder=0):
mylar.WEEKFOLDER = int(weekfolder)
mylar.config_write()
# this will download all downloaded comics from the weekly pull list and throw them
# into a 'weekly' pull folder for those wanting to transfer directly to a 3rd party device.
myDB = db.DBConnection()
if mylar.WEEKFOLDER:
desdir = os.path.join(mylar.DESTINATION_DIR, pulldate)
if os.path.isdir(desdir):
logger.info(u"Directory (" + desdir + ") already exists! Continuing...")
else:
logger.info("Directory doesn't exist!")
try:
os.makedirs(desdir)
logger.info(u"Directory successfully created at: " + desdir)
except OSError:
logger.error(u"Could not create comicdir : " + desdir)
logger.error(u"Defaulting to : " + mylar.DESTINATION_DIR)
desdir = mylar.DESTINATION_DIR
else:
desdir = mylar.GRABBAG_DIR
clist = myDB.select("SELECT * FROM Weekly WHERE Status='Downloaded'")
if clist is None: # nothing on the list, just go go gone
logger.info("There aren't any issues downloaded from this week yet.")
else:
iscount = 0
for cl in clist:
cl['ComicID'] #downloaded & validated ComicID
isslist = myDB.select("SELECT * FROM Issues WHERE ComicID=? AND Status='Downloaded'", [cl['ComicID']])
if isslist is None: pass # no issues found for comicid - boo/boo
else:
for iss in isslist:
#go through issues downloaded until found one we want.
if iss['Issue_Number'] == cl['ISSUE']:
self.downloadLocal(iss['IssueID'], dir=mylar.GRABBAG_DIR)
logger.info("Copied " + iss['ComicName'] + " #" + str(iss['Issue_Number']) + " to " + dir )
self.downloadLocal(iss['IssueID'], dir=desdir)
logger.info("Copied " + iss['ComicName'] + " #" + str(iss['Issue_Number']) + " to " + desdir.encode('utf-8').strip() )
iscount+=1
break
logger.info("I have copied " + str(iscount) + " issues from this Week's pullist as requested.")
raise cherrypy.HTTPRedirect("pullist")
MassWeeklyDownload.exposed = True
#for testing.
@ -1433,7 +1456,6 @@ class WebInterface(object):
"nzbget_pass" : mylar.NZBGET_PASSWORD,
"nzbget_cat" : mylar.NZBGET_CATEGORY,
"nzbget_priority" : mylar.NZBGET_PRIORITY,
"use_blackhole" : helpers.checked(mylar.BLACKHOLE),
"blackhole_dir" : mylar.BLACKHOLE_DIR,
"usenet_retention" : mylar.USENET_RETENTION,