FIX: If 32P was disabled due to an inability to sign on, Mylar would still attempt to retrieve a cached result when a match occurred, which could then result in excessive attempted logins
IMP: Added a Force Type option to the comicdetails page which will allow the type of a series to be forced to TPB/GN format if Mylar is unable to decipher it under normal circumstances (doing this will affect how files are parsed for the given series)
IMP: The TPB/GN file parser will now account for multiple issues of the same TPB series within the given directory
IMP: When enabling/disabling Force Type, the existing folder will be renamed to accommodate the new folder naming convention if required
IMP: Started to move some of the file rename/manipulation modules into a filers.py module for more consistency
IMP: Added a $Type token to the Folder Format which, when triggered, will use the designated Series Type
IMP: (#2130) When importing, an option is now available for the final series destination path to use the imported path location instead of defaulting to the Comic Location base folder - the status will no longer be Archived in such cases. Previously imported series will also no longer be shown on subsequent import runs if the import directory has not changed (thnx @CuddleBear92)
IMP: Added the codebase for a Rename Preview option into the mix - GUI available, but not linked
FIX: Removed DEM from RSS scans when the Public Torrents option is enabled
FIX: (#2137) When adding a series, a series date indicated in a format other than plain numeric would cause an error
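The core of the new $Type handling is a booktype-aware folder format: Corrected_Type (set via Force Type) overrides the ComicVine-reported Type, and the $Type token is stripped whenever the book is a Print series or the booktype-format option is off. Below is a minimal standalone sketch of that substitution; the series name, year and destination are hypothetical, and the real logic lives in filers.FileHandlers.folder_create() and helpers.updateComicLocation(), reading mylar.CONFIG.

import os
import re

def build_folder(folder_format, series, year, booktype, corrected_type=None,
                 format_booktype=True, destination_dir='/comics'):
    # Corrected_Type (the Force Type override) wins over the CV-reported Type.
    if corrected_type is not None:
        booktype = corrected_type
    # $Type is only kept for non-Print books when booktype formatting is enabled;
    # otherwise the token is stripped and the leftover whitespace collapsed.
    if booktype == 'Print' or not format_booktype:
        folder_format = re.sub(r'\s+', ' ', folder_format.replace('$Type', '')).strip()
    values = {'$Series': series, '$Year': year, '$Type': booktype}
    for token, value in values.items():
        folder_format = folder_format.replace(token, str(value))
    return os.path.join(destination_dir, folder_format)

# A series forced to TPB keeps the token; a plain Print series drops it:
print(build_folder('$Series ($Year) $Type', 'Saga', '2012', 'Print', corrected_type='TPB'))
print(build_folder('$Series ($Year) $Type', 'Saga', '2012', 'Print'))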

evilhero 2018-12-10 12:27:53 -05:00
parent 5503a363ba
commit 75d77546fe
17 changed files with 949 additions and 178 deletions

View File

@ -109,10 +109,13 @@
<label><big>Status: </big><norm>${comic['Status']}</norm></label>
</div>
<%
if comic['Type'] == 'None' or comic['Type'] is None or comic['Type'] == 'Print':
if any([comic['Type'] == 'None', comic['Type'] is None, comic['Type'] == 'Print']) and comic['Corrected_Type'] != 'TPB':
comictype = 'Print'
else:
comictype = comic['Type']
if comic['Corrected_Type'] is not None:
comictype = comic['Corrected_Type']
else:
comictype = comic['Type']
%>
<div>
<label><big>Edition: </big><norm>${comictype}</norm>
@ -214,6 +217,11 @@
<input type="checkbox" style="vertical-align: bottom; margin: 3px; margin-top: -3px;" name="force_continuing" value="2" ${comicConfig['force_continuing']} />
<a href="#" title="Will forcibly mark this series as 'Continuing' regardless of actual status"><img src="interfaces/default/images/info32.png" height="16" alt="" /></a>
</div>
<div class="row checkbox right clearfix">
<label>Forcibly Mark series as TPB/GN</label>
<input type="checkbox" style="vertical-align: bottom; margin: 3px; margin-top: -3px;" name="force_type" value="2" ${comicConfig['force_type']} />
<a href="#" title="Will forcibly mark this series as TPB/GN in those instances where it assumes it's a normal issue-based series"><img src="interfaces/default/images/info32.png" height="16" alt="" /></a>
</div>
%if any([comic['ComicYear'] == '2099',comic['ComicYear'] == '0000', comic['ComicYear'] == '', comic['Corrected_SeriesYear']]):
<div class="row">
<label>Series Year</label>

View File

@ -1160,7 +1160,7 @@
<label>Folder Format</label>
<input type="text" name="folder_format" value="${config['folder_format']}" size="43">
<%
folder_options = "$Series = SeriesName\n$Year = SeriesYear\n$Annual = Annual (word)\n$VolumeY = V{SeriesYear}\n$VolumeN = V{Volume#}"
folder_options = "$Series = SeriesName\n$Year = SeriesYear\n$Annual = Annual (word)\n$VolumeY = V{SeriesYear}\n$VolumeN = V{Volume#}\n$Type = BookType (TPB/GN)"
%>
<a href="#" title="${folder_options}"><img src="interfaces/default/images/info32.png" height="16" alt="" /></a>
<small>Use: $Publisher, $Series, $Year<br />
@ -1170,7 +1170,7 @@
<label> File Format</label>
<input type="text" name="file_format" value="${config['file_format']}" size="43">
<%
file_options = "$Series = SeriesName\n$Year = IssueYear\n$Annual = Annual (word)\n$Issue = IssueNumber\n$VolumeY = V{SeriesYear}\n$VolumeN = V{Volume#}\n$month = publication month number\n$monthname = publication month name"
file_options = "$Series = SeriesName\n$Year = IssueYear\n$Annual = Annual (word)\n$Issue = IssueNumber\n$VolumeY = V{SeriesYear}\n$VolumeN = V{Volume#}\n$month = publication month number\n$monthname = publication month name\n$Type = BookType (TPB)"
%>
<a href="#" title="${file_options}"><img src="interfaces/default/images/info32.png" height="16" alt="" /></a>
<small>Use: $Series, $Year, $Issue<br />

View File

@ -30,20 +30,25 @@
<img src="interfaces/default/images/ultron.png" style="float:right" height="125" width="125" />
<fieldset>
<div class="row checkbox">
<input type="checkbox" name="autoadd" style="vertical-align: middle; margin: 3px; margin-top: -1px;" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)}><label>Auto-add new series</label>
<input type="checkbox" disabled name="autoadd" style="vertical-align: middle; margin: 3px; margin-top: -1px;" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)}><label>Auto-add new series</label>
</div>
<div class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" id="imp_move" value="1" ${checked(mylar.CONFIG.IMP_MOVE)}><label>Move files</label>
<input type="checkbox" disabled style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" id="imp_move" value="1" ${checked(mylar.CONFIG.IMP_MOVE)}><label>Move files</label>
</div>
%if mylar.CONFIG.IMP_PATHS is True:
<div class="row checkbox">
<input type="checkbox" disabled style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_paths" id="imp_paths" value="1" ${checked(mylar.CONFIG.IMP_PATHS)}><label>Series directories will be set to current Imported series paths</label>
</div>
%endif
%if mylar.CONFIG.RENAME_FILES:
<div class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)}><label>Rename Files </label>
<input type="checkbox" disabled style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)}><label>Rename Files </label>
<small>(After importing, Rename the files to configuration settings)</small>
<label>${mylar.CONFIG.FOLDER_FORMAT}/${mylar.CONFIG.FILE_FORMAT}</label>
</div>
%endif
<div class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)}><label>Use Existing Metadata</label>
<input type="checkbox" disabled style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)}><label>Use Existing Metadata</label>
<small>(Use existing Metadata to better locate series for import)</small>
</div>
%if mylar.IMPORTLOCK:

View File

@ -71,12 +71,15 @@
comicline = comicname
comictype = comic['ComicType']
comictype = comic['Type']
try:
if any([comictype == 'None', comictype is None, comictype == 'Print']):
if any([comictype == 'None', comictype is None, comictype == 'Print']) and comic['Corrected_Type'] != 'TPB':
comictype = None
else:
comictype = comictype
if comic['Corrected_Type'] is not None:
comictype = comic['Corrected_Type']
else:
comictype = comictype
except:
comictype = None

View File

@ -62,6 +62,7 @@
%endif
</div>
<form action="comicScan" method="GET" id="comicScan">
<fieldset>
<legend>Scan Comic Library</legend>
<p><strong>Where are the comics you want scanned in located?</strong></p>
<p>You can put in any directory, and it will scan for comics</br>
@ -73,35 +74,41 @@
</p>
<br/>
<div class="row">
<label for="">Path to directory</label>
%if mylar.CONFIG.COMIC_DIR:
<input type="text" value="${mylar.CONFIG.COMIC_DIR}" name="path" size="70" />
%else:
<input type="text" value="Enter a Comic Directory to scan" onfocus="if
(this.value==this.defaultValue) this.value='';" name="path" size="70" />
%endif
<label for="">Path to directory</label>
%if mylar.CONFIG.COMIC_DIR:
<input type="text" value="${mylar.CONFIG.COMIC_DIR}" name="path" size="70" />
%else:
<input type="text" value="Enter a Comic Directory to scan" onfocus="if
(this.value==this.defaultValue) this.value='';" name="path" size="70" />
%endif
</div>
<div class="row checkbox">
<input type="checkbox" name="autoadd" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)}><label>Auto-add new series</label>
<input type="checkbox" name="autoadd" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)} /><label>Auto-add new series</label>
</div>
<div class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)}><label>Use existing Metadata</label>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)} /><label>Use existing Metadata</label>
<small>Use existing Metadata to better locate series for import</small>
</div>
<div class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" onclick="initConfigCheckbox($this));" id="imp_move" value="1" ${checked(mylar.CONFIG.IMP_MOVE)}><label>Move files into corresponding Series directory</label>
<small>Leaving this unchecked will not move anything, but will mark the issues as Archived</small>
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" id="imps" value="1" ${checked(mylar.CONFIG.IMP_MOVE)} /><label>Move files</label>
<small>Unchecked will not move anything, but will mark the issues as Archived</small>
</br>
</div>
<div class="config">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)}><label>Rename Files </label>
<small>Rename files to configuration settings</small>
<div id="move_options" class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)} /><label>Rename Files</label>
<small>Rename files to configuration settings</small>
</div>
<div id="path_options" class="row checkbox">
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_paths" id="imp_paths" value="1" ${checked(mylar.CONFIG.IMP_PATHS)} /><label>Set import paths to series location paths</label>
<small>Use the folder paths during import as the series location path</small>
</div>
<br/>
<input type="button" value="Save Changes and Scan" onclick="addScanAction();doAjaxCall('comicScan',$(this),'tabs',true);return true;" data-success="Import Scan now submitted." data-error="Unable to start the scan. Check the logs.">
<input type="button" value="Save Changes without Scanning Library" onclick="doAjaxCall('comicScan',$(this),'tabs',true);return false;" data-success="Changes Saved Successfully" data-error="Unable to save settings. Check the logs.">
<input type="button" value="Save Changes and Scan" onclick="addScanAction();doAjaxCall('comicScan',$(this),'tabs',true);return true;" data-success="Import Scan now submitted." data-error="Unable to start the scan. Check the logs." />
<input type="button" value="Save Changes without Scanning Library" onclick="doAjaxCall('comicScan',$(this),'tabs',true);return false;" data-success="Changes Saved Successfully" data-error="Unable to save settings. Check the logs." />
%if mylar.IMPORTBUTTON:
<input type="button" value="Import Results Management" style="float: right;" onclick="location.href='importResults';" />
%endif
</fieldset>
</form>
</div>
<div id="tabs-2" class="configtable">
@ -278,9 +285,31 @@
}
};
function initThisPage() {
if ($("#imps").is(":checked"))
{
$("#move_options").show();
$("#path_options").hide();
}
else
{
$("#path_options").show();
$("#move_options").hide();
}
$("#imps").click(function(){
if ($("#imps").is(":checked"))
{
$("#move_options").slideDown();
$("#path_options").slideUp();
}
else
{
$("#path_options").slideDown();
$("#move_options").slideUp();
}
});
jQuery( "#tabs" ).tabs();
initActions();
initConfigCheckbox("#imp_move");
startTime();
};
$(document).ready(function() {

View File

@ -0,0 +1,45 @@
<%inherit file="base.html"/>
<%
import mylar
%>
<%def name="headIncludes()">
</%def>
<%def name="body()">
<div id="paddingheader">
<h1 class="clearfix">${title}</h1></br>
</div>
<div style="position:relative; width:960px; height:0px; margin:10px auto;">
<%
format_len = len(file_format)
%>
<form action="previewRename" type="GET">
<div style="position:absolute; top:-20px; right:0px;">
<label>File Format (Applied):</label>
<input type="text" name="file_format" value="${file_format}" size="${format_len}"><input type="image" src="interfaces/default/images/submit.png" height="25" width="25" class="highqual" />
<input type="hidden" name="comicid" value="${comicid}" />
</div>
</form>
</div>
<div>
<table cellpadding="5" cellspacing="5">
<thead>
<tr>
<th id="issueid">IssueID</th>
<th id="originalname" width="500" style="text-align:center">Original Name</th>
<th id="renamed" width="500" style="text-align:center">Renamed</th>
</tr>
</thead>
<tbody>
%for ti in resultlist:
<tr>
<td id="issueid">${ti['issueid']}</td>
<td id="originalname" width="500" style="text-align:center">${ti['original']}</td>
<td id="renamed" width="500" style="text-align:center">${ti['new']}</td>
</tr>
%endfor
</tbody>
</table>
</div>
</%def>

View File

@ -343,27 +343,29 @@ def start():
SCHED_RSS_LAST = monitors['rss']
# Start our scheduled background tasks
SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=5, minutes=5, timezone='UTC'))
if UPDATER_STATUS != 'Paused':
SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=5, minutes=5, timezone='UTC'))
#let's do a run at the Wanted issues here (on startup) if enabled.
ss = searchit.CurrentSearcher()
if CONFIG.NZB_STARTUP_SEARCH:
SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.utcnow(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
else:
if SCHED_SEARCH_LAST is not None:
search_timestamp = float(SCHED_SEARCH_LAST)
logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp))
if SEARCH_STATUS != 'Paused':
ss = searchit.CurrentSearcher()
if CONFIG.NZB_STARTUP_SEARCH:
SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.utcnow(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
else:
search_timestamp = helpers.utctimestamp() + (int(CONFIG.SEARCH_INTERVAL) *60)
if SCHED_SEARCH_LAST is not None:
search_timestamp = float(SCHED_SEARCH_LAST)
logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp))
else:
search_timestamp = helpers.utctimestamp() + (int(CONFIG.SEARCH_INTERVAL) *60)
duration_diff = (helpers.utctimestamp() - search_timestamp)/60
if duration_diff >= int(CONFIG.SEARCH_INTERVAL):
logger.fdebug('[AUTO-SEARCH]Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff)
SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
else:
search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(CONFIG.SEARCH_INTERVAL) * 60) - (duration_diff*60)))
logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, CONFIG.SEARCH_INTERVAL))
SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
duration_diff = (helpers.utctimestamp() - search_timestamp)/60
if duration_diff >= int(CONFIG.SEARCH_INTERVAL):
logger.fdebug('[AUTO-SEARCH]Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff)
SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
else:
search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(CONFIG.SEARCH_INTERVAL) * 60) - (duration_diff*60)))
logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, CONFIG.SEARCH_INTERVAL))
SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
if all([CONFIG.ENABLE_TORRENTS, CONFIG.AUTO_SNATCH, OS_DETECT != 'Windows']) and any([CONFIG.TORRENT_DOWNLOADER == 2, CONFIG.TORRENT_DOWNLOADER == 4]):
logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
@ -420,17 +422,18 @@ def start():
ws = weeklypullit.Weekly()
duration_diff = (weektimestamp - weekly_timestamp)/60
if abs(duration_diff) >= weekly_interval/60:
logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % abs(duration_diff/60))
SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
else:
weekly_diff = datetime.datetime.utcfromtimestamp(weektimestamp + (weekly_interval - (duration_diff * 60)))
logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer))
SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
if WEEKLY_STATUS != 'Paused':
if abs(duration_diff) >= weekly_interval/60:
logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % abs(duration_diff/60))
SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
else:
weekly_diff = datetime.datetime.utcfromtimestamp(weektimestamp + (weekly_interval - (duration_diff * 60)))
logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer))
SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
#initiate startup rss feeds for torrents/nzbs here...
rs = rsscheckit.tehMain()
if CONFIG.ENABLE_RSS:
if CONFIG.ENABLE_RSS is True:
logger.info('[RSS-FEEDS] Initiating startup-RSS feed checks.')
if SCHED_RSS_LAST is not None:
rss_timestamp = float(SCHED_RSS_LAST)
@ -444,13 +447,16 @@ def start():
rss_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (int(CONFIG.RSS_CHECKINTERVAL) * 60) - (duration_diff * 60))
logger.fdebug('[RSS-FEEDS] Scheduling next run for @ %s every %s minutes' % (rss_diff, CONFIG.RSS_CHECKINTERVAL))
SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=rss_diff, trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
#else:
else:
RSS_STATUS = 'Paused'
# SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
# SCHED.pause_job('rss')
if CONFIG.CHECK_GITHUB:
vs = versioncheckit.CheckVersion()
SCHED.add_job(func=vs.run, id='version', name='Check Version', trigger=IntervalTrigger(hours=0, minutes=CONFIG.CHECK_GITHUB_INTERVAL, timezone='UTC'))
else:
VERSION_STATUS = 'Paused'
##run checkFolder every X minutes (basically Manual Run Post-Processing)
if CONFIG.ENABLE_CHECK_FOLDER:
@ -460,6 +466,8 @@ def start():
SCHED.add_job(func=fm.run, id='monitor', name='Folder Monitor', trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
else:
logger.error('[FOLDER MONITOR] You need to specify a monitoring time for the check folder option to work')
else:
MONITOR_STATUS = 'Paused'
logger.info('Firing up the Background Schedulers now....')
try:

View File

@ -136,6 +136,7 @@ _CONFIG_DEFINITIONS = OrderedDict({
'ADD_COMICS': (bool, 'Import', False),
'COMIC_DIR': (str, 'Import', None),
'IMP_MOVE': (bool, 'Import', False),
'IMP_PATHS': (bool, 'Import', False),
'IMP_RENAME': (bool, 'Import', False),
'IMP_METADATA': (bool, 'Import', False), # should default to False - this is enabled for testing only.
@ -786,6 +787,12 @@ class Config(object):
logger.fdebug("Minimum RSS Interval Check delay set for 20 minutes to avoid hammering.")
self.RSS_CHECKINTERVAL = 20
if self.ENABLE_RSS is True and mylar.RSS_STATUS == 'Paused':
mylar.RSS_STATUS = 'Waiting'
elif self.ENABLE_RSS is False and mylar.RSS_STATUS == 'Waiting':
mylar.RSS_STATUS = 'Paused'
logger.info('self.enable_rss is %s [%s]' % (self.ENABLE_RSS, mylar.RSS_STATUS))
if not helpers.is_number(self.CHMOD_DIR):
logger.fdebug("CHMOD Directory value is not a valid numeric - please correct. Defaulting to 0777")
self.CHMOD_DIR = '0777'

560
mylar/filers.py Normal file
View File

@ -0,0 +1,560 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
import re
import os
import mylar
from mylar import helpers, db, logger
class FileHandlers(object):
def __init__(self, comic=None, issue=None, ComicID=None, IssueID=None):
self.myDB = db.DBConnection()
if ComicID is not None:
self.comicid = ComicID
self.comic = self.myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
elif comic is not None:
self.comic = comic
self.comicid = None
else:
self.comic = None
self.comicid = None
if IssueID is not None:
self.issueid = IssueID
self.issue = self.myDB.select('SELECT * FROM issues WHERE IssueID=?', [IssueID])
elif issue is not None:
self.issue = issue
self.issueid = None
else:
self.issue = None
self.issueid = None
def folder_create(self, booktype=None):
# a dictionary named comic needs to be passed in with {'ComicPublisher', 'Corrected_Type', 'Type', 'ComicYear', 'ComicName', 'ComicVersion'}
# or pass in comicid value from __init__
# setup default location here
u_comicnm = self.comic['ComicName']
# let's remove the non-standard characters here that will break filenaming / searching.
comicname_filesafe = helpers.filesafe(u_comicnm)
comicdir = comicname_filesafe
series = comicdir
if series[-1:] == '.':
series = series[:-1]
publisher = re.sub('!', '', self.comic['ComicPublisher']) # thanks Boom!
publisher = helpers.filesafe(publisher)
if booktype is not None:
if self.comic['Corrected_Type'] is not None:
booktype = self.comic['Corrected_Type']
else:
booktype = booktype
else:
booktype = self.comic['Type']
if booktype == 'Print' or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
chunk_fb = re.sub('\$Type', '', mylar.CONFIG.FOLDER_FORMAT)
chunk_b = re.compile(r'\s+')
chunk_folder_format = chunk_b.sub(' ', chunk_fb)
else:
chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT
if any([self.comic['ComicVersion'] is None, booktype != 'Print']):
comicVol = 'None'
else:
comicVol = self.comic['ComicVersion']
#if comversion is None, remove it so it doesn't populate with 'None'
if comicVol == 'None':
chunk_f_f = re.sub('\$VolumeN', '', chunk_folder_format)
chunk_f = re.compile(r'\s+')
chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('No version # found for series, removing from folder format')
logger.fdebug("new folder format: " + str(chunk_folder_format))
#do work to generate folder path
values = {'$Series': series,
'$Publisher': publisher,
'$Year': self.comic['ComicYear'],
'$series': series.lower(),
'$publisher': publisher.lower(),
'$VolumeY': 'V' + self.comic['ComicYear'],
'$VolumeN': comicVol.upper(),
'$Annual': 'Annual',
'$Type': booktype
}
try:
if mylar.CONFIG.FOLDER_FORMAT == '':
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, comicdir, " (" + self.comic['ComicYear'] + ")")
else:
chunk_folder_format = re.sub('[()|[]]', '', chunk_folder_format).strip()
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))
except Exception as e:
if 'TypeError' in str(e):
if mylar.CONFIG.DESTINATION_DIR is None:
logger.error('[ERROR] %s' % e)
logger.error('No Comic Location specified. This NEEDS to be set before anything can be added successfully.')
return
logger.error('[ERROR] %s' % e)
logger.error('Cannot determine Comic Location path properly. Check your Comic Location and Folder Format for any errors.')
return
if mylar.CONFIG.DESTINATION_DIR == "":
logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
return
#enforce proper slashes here..
cnt1 = comlocation.count('\\')
cnt2 = comlocation.count('/')
if cnt1 > cnt2 and '/' in chunk_folder_format:
comlocation = re.sub('/', '\\', comlocation)
if mylar.CONFIG.REPLACE_SPACES:
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)
return comlocation
def rename_file(self, ofilename, issue=None, annualize=None, arc=False, file_format=None): #comicname, issue, comicyear=None, issueid=None)
comicid = self.comicid # it's coming in unicoded...
issueid = self.issueid
if file_format is None:
file_format = mylar.CONFIG.FILE_FORMAT
logger.fdebug(type(comicid))
logger.fdebug(type(issueid))
logger.fdebug('comicid: %s' % comicid)
logger.fdebug('issue# as per cv: %s' % issue)
logger.fdebug('issueid:' + str(issueid))
if issueid is None:
logger.fdebug('annualize is ' + str(annualize))
if arc:
#this has to be adjusted to be able to include story arc issues that span multiple arcs
chkissue = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
else:
chkissue = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
if all([chkissue is None, annualize is None, not mylar.CONFIG.ANNUALS_ON]):
chkissue = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
if chkissue is None:
#rechk chkissue against int value of issue #
if arc:
chkissue = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
else:
chkissue = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
if all([chkissue is None, annualize == 'yes', mylar.CONFIG.ANNUALS_ON]):
chkissue = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
if chkissue is None:
logger.error('Invalid Issue_Number - please validate.')
return
else:
logger.info('Int Issue_number compare found. continuing...')
issueid = chkissue['IssueID']
else:
issueid = chkissue['IssueID']
#use issueid to get publisher, series, year, issue number
logger.fdebug('issueid is now : ' + str(issueid))
if arc:
issueinfo = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND IssueID=? AND StoryArc=?", [comicid, issueid, arc]).fetchone()
else:
issueinfo = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND IssueID=?", [comicid, issueid]).fetchone()
if issueinfo is None:
logger.fdebug('not an issue, checking against annuals')
issueinfo = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND IssueID=?", [comicid, issueid]).fetchone()
if issueinfo is None:
logger.fdebug('Unable to rename - cannot locate issue id within db')
return
else:
annualize = True
if issueinfo is None:
logger.fdebug('Unable to rename - cannot locate issue id within db')
return
#remap the variables to a common factor.
if arc:
issuenum = issueinfo['IssueNumber']
issuedate = issueinfo['IssueDate']
publisher = issueinfo['IssuePublisher']
series = issueinfo['ComicName']
seriesfilename = series #Alternate FileNaming is not available with story arcs.
seriesyear = issueinfo['SeriesYear']
arcdir = helpers.filesafe(issueinfo['StoryArc'])
if mylar.CONFIG.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.CONFIG.REPLACE_CHAR)
if mylar.CONFIG.STORYARCDIR:
storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, "StoryArcs", arcdir)
logger.fdebug('Story Arc Directory set to : ' + storyarcd)
else:
logger.fdebug('Story Arc Directory set to : ' + mylar.CONFIG.GRABBAG_DIR)
storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, mylar.CONFIG.GRABBAG_DIR)
comlocation = storyarcd
comversion = None #need to populate this.
else:
issuenum = issueinfo['Issue_Number']
issuedate = issueinfo['IssueDate']
publisher = self.comic['ComicPublisher']
series = self.comic['ComicName']
if self.comic['AlternateFileName'] is None or self.comic['AlternateFileName'] == 'None':
seriesfilename = series
else:
seriesfilename = self.comic['AlternateFileName']
logger.fdebug('Alternate File Naming has been enabled for this series. Will rename series title to : ' + seriesfilename)
seriesyear = self.comic['ComicYear']
comlocation = self.comic['ComicLocation']
comversion = self.comic['ComicVersion']
unicodeissue = issuenum
if type(issuenum) == unicode:
vals = {u'\xbd':'.5',u'\xbc':'.25',u'\xbe':'.75',u'\u221e':'9999999999',u'\xe2':'9999999999'}
else:
vals = {'\xbd':'.5','\xbc':'.25','\xbe':'.75','\u221e':'9999999999','\xe2':'9999999999'}
x = [vals[key] for key in vals if key in issuenum]
if x:
issuenum = x[0]
logger.fdebug('issue number formatted: %s' % issuenum)
#comicid = issueinfo['ComicID']
#issueno = str(issuenum).split('.')[0]
issue_except = 'None'
issue_exceptions = ['AU',
'INH',
'NOW',
'AI',
'MU',
'A',
'B',
'C',
'X',
'O']
valid_spaces = ('.', '-')
for issexcept in issue_exceptions:
if issexcept.lower() in issuenum.lower():
logger.fdebug('ALPHANUMERIC EXCEPTION : [' + issexcept + ']')
v_chk = [v for v in valid_spaces if v in issuenum]
if v_chk:
iss_space = v_chk[0]
logger.fdebug('character space denoted as : ' + iss_space)
else:
logger.fdebug('character space not denoted.')
iss_space = ''
# if issexcept == 'INH':
# issue_except = '.INH'
if issexcept == 'NOW':
if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
# issue_except = '.NOW'
issue_except = iss_space + issexcept
logger.fdebug('issue_except denoted as : ' + issue_except)
issuenum = re.sub("[^0-9]", "", issuenum)
break
# if 'au' in issuenum.lower() and issuenum[:1].isdigit():
# issue_except = ' AU'
# elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
# issuenum = re.sub("[^0-9]", "", issuenum)
# issue_except = ' AI'
# elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
# issuenum = re.sub("[^0-9]", "", issuenum)
# issue_except = '.INH'
# elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
# if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
# issuenum = re.sub("[^0-9]", "", issuenum)
# issue_except = '.NOW'
if '.' in issuenum:
iss_find = issuenum.find('.')
iss_b4dec = issuenum[:iss_find]
if iss_find == 0:
iss_b4dec = '0'
iss_decval = issuenum[iss_find +1:]
if iss_decval.endswith('.'):
iss_decval = iss_decval[:-1]
if int(iss_decval) == 0:
iss = iss_b4dec
issdec = int(iss_decval)
issueno = iss
else:
if len(iss_decval) == 1:
iss = iss_b4dec + "." + iss_decval
issdec = int(iss_decval) * 10
else:
iss = iss_b4dec + "." + iss_decval.rstrip('0')
issdec = int(iss_decval.rstrip('0')) * 10
issueno = iss_b4dec
else:
iss = issuenum
issueno = iss
# issue zero-suppression here
if mylar.CONFIG.ZERO_LEVEL == "0":
zeroadd = ""
else:
if mylar.CONFIG.ZERO_LEVEL_N == "none": zeroadd = ""
elif mylar.CONFIG.ZERO_LEVEL_N == "0x": zeroadd = "0"
elif mylar.CONFIG.ZERO_LEVEL_N == "00x": zeroadd = "00"
logger.fdebug('Zero Suppression set to : ' + str(mylar.CONFIG.ZERO_LEVEL_N))
prettycomiss = None
if issueno.isalpha():
logger.fdebug('issue detected as an alpha.')
prettycomiss = str(issueno)
else:
try:
x = float(issuenum)
#validity check
if x < 0:
logger.info('I\'ve encountered a negative issue #: %s. Trying to accommodate.' % issueno)
prettycomiss = '-' + str(zeroadd) + str(issueno[1:])
elif x == 9999999999:
logger.fdebug('Infinity issue found.')
issuenum = 'infinity'
elif x >= 0:
pass
else:
raise ValueError
except ValueError, e:
logger.warn('Unable to properly determine issue number [ %s] - you should probably log this on github for help.' % issueno)
return
if prettycomiss is None and len(str(issueno)) > 0:
#if int(issueno) < 0:
# self._log("issue detected is a negative")
# prettycomiss = '-' + str(zeroadd) + str(abs(issueno))
if int(issueno) < 10:
logger.fdebug('issue detected less than 10')
if '.' in iss:
if int(iss_decval) > 0:
issueno = str(iss)
prettycomiss = str(zeroadd) + str(iss)
else:
prettycomiss = str(zeroadd) + str(int(issueno))
else:
prettycomiss = str(zeroadd) + str(iss)
if issue_except != 'None':
prettycomiss = str(prettycomiss) + issue_except
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
elif int(issueno) >= 10 and int(issueno) < 100:
logger.fdebug('issue detected greater than 10, but less than 100')
if mylar.CONFIG.ZERO_LEVEL_N == "none":
zeroadd = ""
else:
zeroadd = "0"
if '.' in iss:
if int(iss_decval) > 0:
issueno = str(iss)
prettycomiss = str(zeroadd) + str(iss)
else:
prettycomiss = str(zeroadd) + str(int(issueno))
else:
prettycomiss = str(zeroadd) + str(iss)
if issue_except != 'None':
prettycomiss = str(prettycomiss) + issue_except
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
else:
logger.fdebug('issue detected greater than 100')
if issuenum == 'infinity':
prettycomiss = 'infinity'
else:
if '.' in iss:
if int(iss_decval) > 0:
issueno = str(iss)
prettycomiss = str(issueno)
if issue_except != 'None':
prettycomiss = str(prettycomiss) + issue_except
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
elif len(str(issueno)) == 0:
prettycomiss = str(issueno)
logger.fdebug('issue length error - cannot determine length. Defaulting to None: ' + str(prettycomiss))
logger.fdebug('Pretty Comic Issue is : ' + str(prettycomiss))
if mylar.CONFIG.UNICODE_ISSUENUMBER:
logger.fdebug('Setting this to Unicode format as requested: %s' % prettycomiss)
prettycomiss = unicodeissue
issueyear = issuedate[:4]
month = issuedate[5:7].replace('-', '').strip()
month_name = helpers.fullmonth(month)
if month_name is None:
month_name = 'None'
logger.fdebug('Issue Year : ' + str(issueyear))
logger.fdebug('Publisher: ' + publisher)
logger.fdebug('Series: ' + series)
logger.fdebug('Year: ' + str(seriesyear))
logger.fdebug('Comic Location: ' + comlocation)
if self.comic['Corrected_Type'] is not None:
if self.comic['Type'] != self.comic['Corrected_Type']:
booktype = self.comic['Corrected_Type']
else:
booktype = self.comic['Type']
else:
booktype = self.comic['Type']
if booktype == 'Print' or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
chunk_fb = re.sub('\$Type', '', file_format)
chunk_b = re.compile(r'\s+')
chunk_file_format = chunk_b.sub(' ', chunk_fb)
else:
chunk_file_format = file_format
if any([comversion is None, booktype != 'Print']):
comversion = 'None'
#if comversion is None, remove it so it doesn't populate with 'None'
if comversion == 'None':
chunk_f_f = re.sub('\$VolumeN', '', chunk_file_format)
chunk_f = re.compile(r'\s+')
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('No version # found for series, removing from filename')
logger.fdebug("new format: " + str(chunk_file_format))
if annualize is None:
chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
chunk_f = re.compile(r'\s+')
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('not an annual - removing from filename parameters')
logger.fdebug('new format: ' + str(chunk_file_format))
else:
logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
if mylar.CONFIG.ANNUALS_ON:
if 'annual' in series.lower():
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
#if it's an annual, but $annual isn't specified in file_format, we need to
#force it in there, by default in the format of $Annual $Issue
#prettycomiss = "Annual " + str(prettycomiss)
logger.fdebug('[%s][ANNUALS-ON][ANNUAL IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
#because it exists within title, strip it then use formatting tag for placement of wording.
chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
chunk_f = re.compile(r'\s+')
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('[%s][ANNUALS-ON][ANNUAL IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
#if it's an annual, but $annual isn't specified in file_format, we need to
#force it in there, by default in the format of $Annual $Issue
prettycomiss = "Annual %s" % prettycomiss
logger.fdebug('[%s][ANNUALS-ON][ANNUAL NOT IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
logger.fdebug('[%s][ANNUALS-ON][ANNUAL NOT IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
#if annuals aren't enabled, then annuals are being tracked as independent series.
#annualize will be true since it's an annual in the seriesname.
if 'annual' in series.lower():
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
#if it's an annual, but $annual isn't specified in file_format, we need to
#force it in there, by default in the format of $Annual $Issue
#prettycomiss = "Annual " + str(prettycomiss)
logger.fdebug('[%s][ANNUALS-OFF][ANNUAL IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
#because it exists within title, strip it then use formatting tag for placement of wording.
chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
chunk_f = re.compile(r'\s+')
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('[%s][ANNUALS-OFF][ANNUAL IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
#if it's an annual, but $annual isn't specified in file_format, we need to
#force it in there, by default in the format of $Annual $Issue
prettycomiss = "Annual %s" % prettycomiss
logger.fdebug('[%s][ANNUALS-OFF][ANNUAL NOT IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
else:
logger.fdebug('[%s][ANNUALS-OFF][ANNUAL NOT IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
logger.fdebug('Annual detected within series title of ' + series + '. Not auto-correcting issue #')
seriesfilename = seriesfilename.encode('ascii', 'ignore').strip()
filebad = [':', ',', '/', '?', '!', '\'', '\"', '\*'] #in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname:
for dbd in filebad:
if dbd in seriesfilename:
if any([dbd == '/', dbd == '*']):
repthechar = '-'
else:
repthechar = ''
seriesfilename = seriesfilename.replace(dbd, repthechar)
logger.fdebug('Altering series name due to filenaming restrictions: ' + seriesfilename)
publisher = re.sub('!', '', publisher)
file_values = {'$Series': seriesfilename,
'$Issue': prettycomiss,
'$Year': issueyear,
'$series': series.lower(),
'$Publisher': publisher,
'$publisher': publisher.lower(),
'$VolumeY': 'V' + str(seriesyear),
'$VolumeN': comversion,
'$monthname': month_name,
'$month': month,
'$Annual': 'Annual',
'$Type': booktype
}
extensions = ('.cbr', '.cbz', '.cb7')
if ofilename.lower().endswith(extensions):
path, ext = os.path.splitext(ofilename)
if file_format == '':
logger.fdebug('Rename Files is not enabled - keeping original filename.')
#check if extension is in nzb_name - will screw up otherwise
if ofilename.lower().endswith(extensions):
nfilename = ofilename[:-4]
else:
nfilename = ofilename
else:
chunk_file_format = re.sub('[()|[]]', '', chunk_file_format).strip()
nfilename = helpers.replace_all(chunk_file_format, file_values)
if mylar.CONFIG.REPLACE_SPACES:
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
nfilename = nfilename.replace(' ', mylar.CONFIG.REPLACE_CHAR)
nfilename = re.sub('[\,\:]', '', nfilename) + ext.lower()
logger.fdebug('New Filename: ' + nfilename)
if mylar.CONFIG.LOWERCASE_FILENAMES:
nfilename = nfilename.lower()
dst = os.path.join(comlocation, nfilename)
else:
dst = os.path.join(comlocation, nfilename)
logger.fdebug('Source: ' + ofilename)
logger.fdebug('Destination: ' + dst)
rename_this = {"destination_dir": dst,
"nfilename": nfilename,
"issueid": issueid,
"comicid": comicid}
return rename_this
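For reference, a hypothetical usage sketch of the new FileHandlers class inside a running Mylar instance; the ComicID/IssueID values and the filename below are made up, and both methods expect the corresponding rows to already exist in the comics/issues tables.

from mylar import filers

# Load the series row by ComicID and work out its destination folder,
# honouring Corrected_Type / the $Type folder-format token.
handler = filers.FileHandlers(ComicID='110568')
new_location = handler.folder_create()

# Preview what a given file would be renamed to for a specific issue.
renamer = filers.FileHandlers(ComicID='110568', IssueID='660011')
renamed = renamer.rename_file('Some Series 001 (2018).cbz', issue='1')
if renamed is not None:
    print('%s -> %s' % (renamed['nfilename'], renamed['destination_dir']))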

View File

@ -821,6 +821,18 @@ def updateComicLocation():
publisher = re.sub('!', '', dl['ComicPublisher']) # thanks Boom!
year = dl['ComicYear']
if dl['Corrected_Type'] is not None:
booktype = dl['Corrected_Type']
else:
booktype = dl['Type']
if booktype == 'Print' or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
chunk_fb = re.sub('\$Type', '', mylar.CONFIG.FOLDER_FORMAT)
chunk_b = re.compile(r'\s+')
chunk_folder_format = chunk_b.sub(' ', chunk_fb)
else:
chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT
comversion = dl['ComicVersion']
if comversion is None:
comversion = 'None'
@ -841,7 +853,8 @@ def updateComicLocation():
'$publisher': publisher.lower(),
'$VolumeY': 'V' + str(year),
'$VolumeN': comversion,
'$Annual': 'Annual'
'$Annual': 'Annual',
'$Type': booktype
}
#set the paths here with the separator removed allowing for cross-platform altering.
@ -1425,7 +1438,8 @@ def havetotals(refreshit=None):
"totalissues": totalissues,
"haveissues": haveissues,
"DateAdded": comic['LastUpdated'],
"ComicType": comic['Type']})
"Type": comic['Type'],
"Corrected_Type": comic['Corrected_Type']})
return comics
@ -3226,7 +3240,8 @@ def disable_provider(site, newznab=False):
mylar.CONFIG.DOGNZB = False
elif site == 'experimental':
mylar.CONFIG.EXPERIMENTAL = False
elif site == '32P':
mylar.CONFIG.ENABLE_32P = False
def date_conversion(originaldate):
c_obj_date = datetime.datetime.strptime(originaldate, "%Y-%m-%d %H:%M:%S")
@ -3244,7 +3259,10 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
if job is None:
dbupdate_newstatus = 'Waiting'
dbupdate_nextrun = None
rss_newstatus = 'Waiting'
if mylar.CONFIG.ENABLE_RSS is True:
rss_newstatus = 'Waiting'
else:
rss_newstatus = 'Paused'
rss_nextrun = None
weekly_newstatus = 'Waiting'
weekly_nextrun = None
@ -3252,7 +3270,10 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
search_nextrun = None
version_newstatus = 'Waiting'
version_nextrun = None
monitor_newstatus = 'Waiting'
if mylar.CONFIG.ENABLE_CHECK_FOLDER is True:
monitor_newstatus = 'Waiting'
else:
monitor_newstatus = 'Paused'
monitor_nextrun = None
job_info = myDB.select('SELECT DISTINCT * FROM jobhistory')
@ -3262,31 +3283,37 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
if mylar.SCHED_DBUPDATE_LAST is None:
mylar.SCHED_DBUPDATE_LAST = ji['prev_run_timestamp']
dbupdate_newstatus = ji['status']
mylar.UPDATER_STATUS = dbupdate_newstatus
dbupdate_nextrun = ji['next_run_timestamp']
elif 'search' in ji['JobName'].lower():
if mylar.SCHED_SEARCH_LAST is None:
mylar.SCHED_SEARCH_LAST = ji['prev_run_timestamp']
search_newstatus = ji['status']
mylar.SEARCH_STATUS = search_newstatus
search_nextrun = ji['next_run_timestamp']
elif 'rss' in ji['JobName'].lower():
if mylar.SCHED_RSS_LAST is None:
mylar.SCHED_RSS_LAST = ji['prev_run_timestamp']
rss_newstatus = ji['status']
mylar.RSS_STATUS = rss_newstatus
rss_nextrun = ji['next_run_timestamp']
elif 'weekly' in ji['JobName'].lower():
if mylar.SCHED_WEEKLY_LAST is None:
mylar.SCHED_WEEKLY_LAST = ji['prev_run_timestamp']
weekly_newstatus = ji['status']
mylar.WEEKLY_STATUS = weekly_newstatus
weekly_nextrun = ji['next_run_timestamp']
elif 'version' in ji['JobName'].lower():
if mylar.SCHED_VERSION_LAST is None:
mylar.SCHED_VERSION_LAST = ji['prev_run_timestamp']
version_newstatus = ji['status']
mylar.VERSION_STATUS = version_newstatus
version_nextrun = ji['next_run_timestamp']
elif 'monitor' in ji['JobName'].lower():
if mylar.SCHED_MONITOR_LAST is None:
mylar.SCHED_MONITOR_LAST = ji['prev_run_timestamp']
monitor_newstatus = ji['status']
mylar.MONITOR_STATUS = monitor_newstatus
monitor_nextrun = ji['next_run_timestamp']
monitors = {'weekly': mylar.SCHED_WEEKLY_LAST,
@ -3305,21 +3332,27 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
elif 'update' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_DBUPDATE_LAST
newstatus = dbupdate_newstatus
mylar.UPDATER_STATUS = newstatus
elif 'search' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_SEARCH_LAST
newstatus = search_newstatus
mylar.SEARCH_STATUS = newstatus
elif 'rss' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_RSS_LAST
newstatus = rss_newstatus
mylar.RSS_STATUS = newstatus
elif 'weekly' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_WEEKLY_LAST
newstatus = weekly_newstatus
mylar.WEEKLY_STATUS = newstatus
elif 'version' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_VERSION_LAST
newstatus = version_newstatus
mylar.VERSION_STATUS = newstatus
elif 'monitor' in jobinfo.lower():
prev_run_timestamp = mylar.SCHED_MONITOR_LAST
newstatus = monitor_newstatus
mylar.MONITOR_STATUS = newstatus
jobname = jobinfo[:jobinfo.find('(')-1].strip()
#logger.fdebug('jobinfo: %s' % jobinfo)

View File

@ -32,7 +32,7 @@ import cherrypy
import requests
import mylar
from mylar import logger, helpers, db, mb, cv, parseit, filechecker, search, updater, moveit, comicbookdb
from mylar import logger, filers, helpers, db, mb, cv, parseit, filechecker, search, updater, moveit, comicbookdb
def is_exists(comicid):
@ -49,7 +49,7 @@ def is_exists(comicid):
return False
def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=None, calledfrom=None, annload=None, chkwant=None, issuechk=None, issuetype=None, latestissueinfo=None, csyear=None):
def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=None, calledfrom=None, annload=None, chkwant=None, issuechk=None, issuetype=None, latestissueinfo=None, csyear=None, fixed_type=None):
myDB = db.DBConnection()
controlValueDict = {"ComicID": comicid}
@ -58,7 +58,10 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
if dbcomic is None:
newValueDict = {"ComicName": "Comic ID: %s" % (comicid),
"Status": "Loading"}
comlocation = None
if all([imported, mylar.CONFIG.IMP_PATHS is True]):
comlocation = os.path.dirname(imported['filelisting'][0]['comiclocation'])
else:
comlocation = None
oldcomversion = None
series_status = 'Loading'
lastissueid = None
@ -113,6 +116,9 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
else:
sortname = comic['ComicName']
comic['Corrected_Type'] = fixed_type
if fixed_type is not None and fixed_type != comic['Type']:
logger.info('Forced Comic Type to : %s' % comic['Corrected_Type'])
logger.info('Now adding/updating: ' + comic['ComicName'])
#--Now that we know ComicName, let's try some scraping
@ -197,67 +203,16 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
comicname_filesafe = helpers.filesafe(u_comicnm)
if comlocation is None:
comicdir = comicname_filesafe
series = comicdir
if series[-1:] == '.':
series[:-1]
publisher = re.sub('!', '', comic['ComicPublisher']) # thanks Boom!
publisher = helpers.filesafe(publisher)
year = SeriesYear
booktype = comic['Type']
if booktype == 'Print' or all([comic['Type'] != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
chunk_fb = re.sub('\$Type', '', mylar.CONFIG.FOLDER_FORMAT)
chunk_b = re.compile(r'\s+')
chunk_folder_format = chunk_b.sub(' ', chunk_fb)
else:
chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT
comic_values = {'ComicName': comic['ComicName'],
'ComicPublisher': comic['ComicPublisher'],
'ComicYear': SeriesYear,
'ComicVersion': comicVol,
'Type': comic['Type'],
'Corrected_Type': comic['Corrected_Type']}
if any([comicVol is None, comic['Type'] != 'Print']):
comicVol = 'None'
#if comversion is None, remove it so it doesn't populate with 'None'
if comicVol == 'None':
chunk_f_f = re.sub('\$VolumeN', '', chunk_folder_format)
chunk_f = re.compile(r'\s+')
chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
logger.fdebug('No version # found for series, removing from folder format')
logger.fdebug("new folder format: " + str(chunk_folder_format))
#do work to generate folder path
values = {'$Series': series,
'$Publisher': publisher,
'$Year': year,
'$series': series.lower(),
'$publisher': publisher.lower(),
'$VolumeY': 'V' + str(year),
'$VolumeN': comicVol.upper(),
'$Annual': 'Annual',
'$Type': booktype
}
try:
if mylar.CONFIG.FOLDER_FORMAT == '':
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, comicdir, " (" + SeriesYear + ")")
else:
chunk_folder_format = re.sub('[()|[]]', '', chunk_folder_format).strip()
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))
except Exception as e:
if 'TypeError' in e:
if mylar.CONFIG.DESTINATION_DIR is None:
logger.error('[ERROR] %s' % e)
logger.error('No Comic Location specified. This NEEDS to be set before anything can be added successfully.')
return
logger.error('[ERROR] %s' % e)
logger.error('Cannot determine Comic Location path properly. Check your Comic Location and Folder Format for any errors.')
return
#comlocation = mylar.CONFIG.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
if mylar.CONFIG.DESTINATION_DIR == "":
logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
return
if mylar.CONFIG.REPLACE_SPACES:
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)
dothedew = filers.FileHandlers(comic=comic_values)
comlocation = dothedew.folder_create()
#moved this out of the above loop so it will chk for existance of comlocation in case moved
#if it doesn't exist - create it (otherwise will bugger up later on)
@ -341,6 +296,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
# "ComicPublished": gcdinfo['resultPublished'],
"ComicPublished": "Unknown",
"Type": comic['Type'],
"Corrected_Type": comic['Corrected_Type'],
"Collects": issue_list,
"DateAdded": helpers.today(),
"Status": "Loading"}

23
mylar/librarysync.py Executable file → Normal file
View File

@ -57,17 +57,19 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
cbz_retry = 0
mylar.IMPORT_STATUS = 'Now attempting to parse files for additional information'
myDB = db.DBConnection()
#mylar.IMPORT_PARSED_COUNT #used to count what #/totalfiles the filename parser is currently on
for r, d, f in os.walk(dir):
for files in f:
mylar.IMPORT_FILES +=1
if 'cvinfo' in files:
cv_location.append(r)
logger.fdebug('CVINFO found: ' + os.path.join(r))
if any(files.lower().endswith('.' + x.lower()) for x in extensions):
comic = files
comicpath = os.path.join(r, files)
if mylar.CONFIG.IMP_PATHS is True:
if myDB.select('SELECT * FROM comics JOIN issues WHERE issues.Status="Downloaded" AND ComicLocation=? AND issues.Location=?', [r.decode(mylar.SYS_ENCODING, 'replace'), files.decode(mylar.SYS_ENCODING, 'replace')]):
logger.info('Skipped known issue path: %s' % comicpath)
continue
comic = files
comicsize = os.path.getsize(comicpath)
logger.fdebug('Comic: ' + comic + ' [' + comicpath + '] - ' + str(comicsize) + ' bytes')
@ -148,6 +150,10 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
cbz_retry +=1
continue
if 'cvinfo' in files:
cv_location.append(r)
logger.fdebug('CVINFO found: ' + os.path.join(r))
mylar.IMPORT_TOTALFILES = comiccnt
logger.info('I have successfully discovered & parsed a total of ' + str(comiccnt) + ' files....analyzing now')
logger.info('I have not been able to determine what ' + str(len(failure_list)) + ' files are')
@ -156,8 +162,8 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
mylar.IMPORT_STATUS = 'Successfully parsed ' + str(comiccnt) + ' files'
#return queue.put(valreturn)
logger.fdebug(utter_failure_list)
myDB = db.DBConnection()
if len(utter_failure_list) > 0:
logger.fdebug('Failure list: %s' % utter_failure_list)
#let's load in the watchlist to see if we have any matches.
logger.info("loading in the watchlist to see if a series is being watched already...")
@ -504,7 +510,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
for x in issueid_list:
reverse_issueids.append(x['issueid'])
vals = None
vals = []
if len(reverse_issueids) > 0:
mylar.IMPORT_STATUS = 'Now Reverse looking up ' + str(len(reverse_issueids)) + ' IssueIDs to get the ComicIDs'
vals = mylar.cv.getComic(None, 'import', comicidlist=reverse_issueids)
@ -601,6 +607,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
import_cv_ids = 0
else:
import_cv_ids = 0
cvimport_comicids = None
return {'import_by_comicids': import_by_comicids,
'import_count': len(import_by_comicids),

View File

@ -14,7 +14,7 @@ import random
from StringIO import StringIO
import mylar
from mylar import db, logger, ftpsshup, helpers, auth32p, utorrent
from mylar import db, logger, ftpsshup, helpers, auth32p, utorrent, helpers
import torrent.clients.transmission as transmission
import torrent.clients.deluge as deluge
import torrent.clients.qbittorrent as qbittorrent
@ -82,7 +82,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
feedtype = None
if pickfeed == "1" and mylar.CONFIG.ENABLE_32P: # 32pages new releases feed.
if pickfeed == "1" and mylar.CONFIG.ENABLE_32P is True: # 32pages new releases feed.
feed = 'https://32pag.es/feeds.php?feed=torrents_all&user=' + feedinfo['user'] + '&auth=' + feedinfo['auth'] + '&passkey=' + feedinfo['passkey'] + '&authkey=' + feedinfo['authkey']
feedtype = ' from the New Releases RSS Feed for comics'
verify = bool(mylar.CONFIG.VERIFY_32P)
@ -118,7 +118,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
feed = mylar.WWTURL + 'rss.php?cat=132,50'
feedtype = ' from the New Releases RSS Feed from WorldWideTorrents'
verify = bool(mylar.CONFIG.PUBLIC_VERIFY)
elif int(pickfeed) >= 7 and feedinfo is not None:
elif int(pickfeed) >= 7 and feedinfo is not None and mylar.CONFIG.ENABLE_32P is True:
#personal 32P notification feeds.
#get the info here
feed = 'https://32pag.es/feeds.php?feed=' + feedinfo['feed'] + '&user=' + feedinfo['user'] + '&auth=' + feedinfo['auth'] + '&passkey=' + feedinfo['passkey'] + '&authkey=' + feedinfo['authkey'] + '&name=' + feedinfo['feedname']
@ -864,6 +864,9 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
if site == '32P':
url = 'https://32pag.es/torrents.php'
if mylar.CONFIG.ENABLE_32P is False:
return "fail"
if mylar.CONFIG.VERIFY_32P == 1 or mylar.CONFIG.VERIFY_32P == True:
verify = True
else:
@ -891,8 +894,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
feed32p = auth32p.info32p(reauthenticate=True)
feedinfo = feed32p.authenticate()
if feedinfo == "disable":
mylar.CONFIG.ENABLE_32P = 0
#mylar.config_write()
helpers.disable_provider('32P')
return "fail"
if mylar.CONFIG.PASSKEY_32P is None or mylar.AUTHKEY_32P is None or mylar.KEYS_32P is None:
logger.error('[RSS] Unable to sign-on to 32P to validate settings and initiate download sequence. Please enter/check your username password in the configuration.')
@ -1014,8 +1016,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
feedinfo = feed32p.authenticate()
if feedinfo == "disable":
mylar.CONFIG.ENABLE_32P = 0
#mylar.config_write()
helpers.disable_provider('32P')
return "fail"
logger.debug('[TOR2CLIENT-32P] Creating CF Scraper')

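The two hunks above replace the inline mylar.CONFIG.ENABLE_32P = 0 shutoffs with a single call to helpers.disable_provider('32P'), so a failed 32P sign-on is handled identically from every call site. The helper itself is not part of this diff; the following is only a minimal sketch, assuming it does nothing more than flip the config flag and log the shutoff.

import logging

logger = logging.getLogger(__name__)

class _Config(object):
    # stand-in for mylar.CONFIG; only the flag relevant to this sketch
    ENABLE_32P = True

CONFIG = _Config()

def disable_provider(site):
    # Centralized shutoff so every caller disables a provider the same way
    # instead of poking the config flag directly (illustrative only).
    if site == '32P':
        CONFIG.ENABLE_32P = False
        logger.warning('[%s] Sign-on failed - provider disabled; skipping further attempts.', site)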
View File

@ -54,7 +54,6 @@ class tehMain():
logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
if mylar.CONFIG.ENABLE_PUBLIC:
logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on Demonoid / WorldWideTorrents.')
#rsscheck.torrents(pickfeed='3') #TP.SE RSS Check (has to be page-parsed)
rsscheck.torrents(pickfeed='Public') #TPSE = DEM RSS Check + WWT RSS Check
if mylar.CONFIG.ENABLE_32P is True:
logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.')
@ -75,8 +74,7 @@ class tehMain():
if feedinfo != "disable":
pass
else:
mylar.CONFIG.ENABLE_32P = False
#mylar.config_write()
helpers.disable_provider('32P')
else:
feedinfo = mylar.FEEDINFO_32P

View File

@ -582,9 +582,12 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
#logger.fdebug('comicid: %s' % ComicID)
if RSS == "yes":
if nzbprov == '32P' or nzbprov == 'Public Torrents':
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to [" + str(nzbprov) + "] RSS for " + ComicName + " : " + str(mod_isssearch))
bb = rsscheck.torrentdbsearch(ComicName, mod_isssearch, ComicID, nzbprov, oneoff)
if all([nzbprov == '32P', mylar.CONFIG.ENABLE_32P is True]) or nzbprov == 'Public Torrents':
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to [" + str(nzbprov) + "] RSS for " + ComicName + " : " + str(mod_isssearch))
bb = rsscheck.torrentdbsearch(ComicName, mod_isssearch, ComicID, nzbprov, oneoff)
else:
bb = 'no results'
else:
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to RSS for " + str(findcomic) + " : " + str(mod_isssearch) + " (" + str(ComicYear) + ")")
@ -594,6 +597,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
nzbprov_fix = name_torznab
else: nzbprov_fix = nzbprov
bb = rsscheck.nzbdbsearch(findcomic, mod_isssearch, ComicID, nzbprov_fix, ComicYear, ComicVersion, oneoff)
if bb == 'disable':
helpers.disable_provider('32P')
bb = 'no results'
if bb is None:
bb = 'no results'
#this is the API calls
@ -612,7 +618,10 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
#then we call the ajax against the id and issue# and volume (if exists)
a = auth32p.info32p(searchterm=searchterm)
bb = a.searchit()
if bb is None:
if bb == 'disable':
helpers.disable_provider('32P')
bb = 'no results'
elif bb is None:
bb = 'no results'
else:
bb = "no results"
@ -2688,6 +2697,9 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
logger.fdebug("ComicName:" + ComicName)
logger.fdebug("link:" + link)
logger.fdebug("Torrent Provider:" + nzbprov)
if all([mylar.CONFIG.ENABLE_32P is False, nzbprov == '32P']):
logger.fdebug('32P is disabled - not attempting to download item')
return "torrent-fail"
rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov, nzbid) #nzbid = hash for usage with public torrents
if rcheck == "fail":

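The search.py changes above follow one pattern: a provider search that cannot sign on returns the string 'disable', the caller turns the provider off once via helpers.disable_provider, and the result degrades to 'no results' rather than triggering repeated login attempts. A hedged sketch of that sentinel handling, with illustrative names:

def handle_32p_result(bb, disable_provider):
    # 'disable' means the provider could not sign on: shut it off once and
    # fall back to an empty result instead of retrying the login.
    if bb == 'disable':
        disable_provider('32P')
        return 'no results'
    if bb is None:
        return 'no results'
    return bb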
View File

@ -36,7 +36,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
if mylar.CONFIG.UPDATE_ENDED:
logger.info('Updating only Continuing Series (option enabled) - this might cause problems with the pull-list matching for rebooted series')
comiclist = []
completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, LatestDate ASC')
completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, LastUpdated, ComicID, ComicName, Corrected_SeriesYear, Corrected_Type, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, LatestDate ASC')
for comlist in completelist:
if comlist['LatestDate'] is None:
recentstatus = 'Loading'
@ -65,15 +65,16 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
"ComicID": comlist['ComicID'],
"ComicName": comlist['ComicName'],
"ComicYear": comlist['ComicYear'],
"Corrected_SeriesYear": comlist['Corrected_SeriesYear']})
"Corrected_SeriesYear": comlist['Corrected_SeriesYear'],
"Corrected_Type": comlist['Corrected_Type']})
else:
comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, latestDate ASC')
comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear, Corrected_Type from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, latestDate ASC')
else:
comiclist = []
comiclisting = ComicIDList
for cl in comiclisting:
comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear, LastUpdated from comics WHERE ComicID=? order by LastUpdated DESC, LatestDate ASC', [cl])
comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear, Corrected_Type, LastUpdated from comics WHERE ComicID=? order by LastUpdated DESC, LatestDate ASC', [cl])
if all([sched is False, calledfrom is None]):
logger.info('Starting update for %i active comics' % len(comiclist))
@ -86,6 +87,10 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
for comic in sorted(comiclist, key=operator.itemgetter('LastUpdated'), reverse=True):
dspyear = comic['ComicYear']
csyear = None
fixed_type = None
if comic['Corrected_Type'] is not None:
fixed_type = comic['Corrected_Type']
if comic['Corrected_SeriesYear'] is not None:
csyear = comic['Corrected_SeriesYear']
@ -180,7 +185,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
logger.fdebug("Refreshing the series and pulling in new data using only CV.")
if whack == False:
chkstatus = mylar.importer.addComictoDB(ComicID, mismatch, calledfrom='dbupdate', annload=annload, csyear=csyear)
chkstatus = mylar.importer.addComictoDB(ComicID, mismatch, calledfrom='dbupdate', annload=annload, csyear=csyear, fixed_type=fixed_type)
if chkstatus['status'] == 'complete':
#delete the data here if it's all valid.
logger.fdebug("Deleting all old issue data to make sure new data is clean...")
@ -692,7 +697,8 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, pro
if chkd is None:
pass
else:
if chkd['AltNZBName'] is None or chkd['AltNZBName'] == '':
altnames = chkd['AltNZBName']
if any([altnames is None, altnames == '']):
#we need to wipe the entry so we can re-update with the alt-nzbname if required
myDB.action('DELETE FROM nzblog WHERE IssueID=? and Provider=?', [IssueID, prov])
logger.fdebug('Deleted stale entry from nzblog for IssueID: ' + str(IssueID) + ' [' + prov + ']')
@ -925,6 +931,12 @@ def forceRescan(ComicID, archive=None, module=None, recheck=False):
altnames = rescan['AlternateSearch'] + '##'
else:
altnames = ''
if all([rescan['Type'] != 'Print', rescan['Type'] != 'Digital']) or rescan['Corrected_Type'] == 'TPB':
booktype = 'TPB'
else:
booktype = None
annscan = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
if annscan is None:
pass
@ -964,9 +976,13 @@ def forceRescan(ComicID, archive=None, module=None, recheck=False):
files_arc = arcval.listFiles()
fca.append(files_arc)
comiccnt = int(files_arc['comiccount'])
fcb = []
fc = {}
#if len(fca) > 0:
is_cnt = myDB.select("SELECT COUNT(*) FROM issues WHERE ComicID=?", [ComicID])
iscnt = is_cnt[0][0]
for ca in fca:
i = 0
while True:
@ -974,16 +990,18 @@ def forceRescan(ComicID, archive=None, module=None, recheck=False):
cla = ca['comiclist'][i]
except (IndexError, KeyError) as e:
break
if booktype == 'TPB' and iscnt > 1:
just_the_digits = re.sub('[^0-9]', '', cla['SeriesVolume']).strip()
else:
just_the_digits = cla['JusttheDigits']
fcb.append({"ComicFilename": cla['ComicFilename'],
"ComicLocation": cla['ComicLocation'],
"ComicSize": cla['ComicSize'],
"JusttheDigits": cla['JusttheDigits'],
"JusttheDigits": just_the_digits,
"AnnualComicID": cla['AnnualComicID']})
i+=1
fc['comiclist'] = fcb
is_cnt = myDB.select("SELECT COUNT(*) FROM issues WHERE ComicID=?", [ComicID])
iscnt = is_cnt[0][0]
#iscnt = rescan['Total']
havefiles = 0
if mylar.CONFIG.ANNUALS_ON:

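The forceRescan() change above is the TPB/GN parser tweak from the commit message: when a series is marked TPB and more than one issue exists in the directory, the comparable "issue number" is taken from the parsed volume token rather than from JusttheDigits. A small standalone illustration of that digit extraction (sample inputs are made up):

import re

def tpb_issue_digits(series_volume):
    # Keep only the digits of the parsed volume token, e.g. 'v03' -> '03'
    return re.sub('[^0-9]', '', series_volume).strip()

print(tpb_issue_digits('v03'))       # 03
print(tpb_issue_digits('Vol. 12'))   # 12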
View File

@ -187,12 +187,18 @@ class WebInterface(object):
issues_list = None
#logger.info('issues_list: %s' % issues_list)
if comic['Corrected_Type'] == 'TPB':
force_type = 1
else:
force_type = 0
comicConfig = {
"fuzzy_year0": helpers.radio(int(usethefuzzy), 0),
"fuzzy_year1": helpers.radio(int(usethefuzzy), 1),
"fuzzy_year2": helpers.radio(int(usethefuzzy), 2),
"skipped2wanted": helpers.checked(skipped2wanted),
"force_continuing": helpers.checked(force_continuing),
"force_type": helpers.checked(force_type),
"delete_dir": helpers.checked(mylar.CONFIG.DELETE_REMOVE_DIR),
"allow_packs": helpers.checked(int(allowpacks)),
"corrected_seriesyear": comic['ComicYear'],
@ -2178,23 +2184,48 @@ class WebInterface(object):
annualDelete.exposed = True
def previewRename(self, comicidlist):
def previewRename(self, **args): #comicid=None, comicidlist=None):
file_format = mylar.CONFIG.FILE_FORMAT
myDB = db.DBConnection()
resultlist = []
for comicid in comicidlist:
comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
for k,v in args.items():
if any([k == 'x', k == 'y']):
continue
elif 'file_format' in k:
file_format = str(v)
elif 'comicid' in k:
if type(v) is list:
comicid = str(' '.join(v))
elif type(v) is unicode:
comicid = re.sub('[\]\[\']', '', v.decode('utf-8').encode('ascii')).strip()
else:
comicid = v
if comicid is not None and type(comicid) is not list:
comicidlist = []
comicidlist.append(comicid)
for cid in comicidlist:
comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [cid]).fetchone()
comicdir = comic['ComicLocation']
comicname = comic['ComicName']
issue = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Location is not None ORDER BY ReleaseDate", [comicid]).fetchone()
if 'annual' in issue['Location'].lower():
annualize = 'yes'
else:
annualize = None
renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], issue['Location'], comicyear=None, issueid=issue['IssueID'], annualize=annualize)
resultlist.append({'original': issue['Location'],
'new': renameiss['nfilename']})
issuelist = myDB.select("SELECT * FROM issues WHERE ComicID=? AND Location is not NULL ORDER BY ReleaseDate", [str(cid)])
if issuelist:
for issue in issuelist:
if 'annual' in issue['Location'].lower():
annualize = 'yes'
else:
annualize = None
import filers
rniss = filers.FileHandlers(ComicID=str(cid), IssueID=issue['IssueID'])
renameiss = rniss.rename_file(issue['Location'], annualize=annualize, file_format=file_format)
#renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], issue['Location'], comicyear=None, issueid=issue['IssueID'], annualize=annualize)
resultlist.append({'issueid': renameiss['issueid'],
'comicid': renameiss['comicid'],
'original': issue['Location'],
'new': renameiss['nfilename']})
logger.info('resultlist: %s' % resultlist)
return serve_template(templatename="previewrename.html", title="Preview Renamer", resultlist=resultlist, file_format=file_format, comicid=comicidlist)
previewRename.exposed = True
def manualRename(self, comicid):
@ -2288,6 +2319,10 @@ class WebInterface(object):
else:
next_run = None
if 'rss' in jb['JobName'].lower():
if jb['Status'] == 'Waiting' and mylar.CONFIG.ENABLE_RSS is False:
mylar.RSS_STATUS = 'Paused'
elif jb['Status'] == 'Paused' and mylar.CONFIG.ENABLE_RSS is True:
mylar.RSS_STATUS = 'Waiting'
status = mylar.RSS_STATUS
interval = str(mylar.CONFIG.RSS_CHECKINTERVAL) + ' mins'
if 'weekly' in jb['JobName'].lower():
@ -2307,6 +2342,9 @@ class WebInterface(object):
status = mylar.VERSION_STATUS
interval = str(mylar.CONFIG.CHECK_GITHUB_INTERVAL) + 'mins'
if status != jb['Status'] and not('rss' in jb['JobName'].lower()):
status = jb['Status']
tmp.append({'prev_run_datetime': prev_run,
'next_run_datetime': next_run,
'interval': interval,
@ -2328,17 +2366,32 @@ class WebInterface(object):
if jobid is not None:
myDB = db.DBConnection()
if mode == 'pause':
mylar.SCHED.pause_job(jobid)
try:
mylar.SCHED.pause_job(jobid)
except:
pass
logger.info('[%s] Paused scheduled runtime.' % job)
ctrl = {'JobName': job}
val = {'Status': 'Paused'}
if jobid == 'rss':
mylar.CONFIG.ENABLE_RSS = False
elif jobid == 'monitor':
mylar.CONFIG.ENABLE_CHECK_FOLDER = False
myDB.upsert('jobhistory', val, ctrl)
elif mode == 'resume':
mylar.SCHED.resume_job(jobid)
try:
mylar.SCHED.resume_job(jobid)
except:
pass
logger.info('[%s] Resumed scheduled runtime.' % job)
ctrl = {'JobName': job}
val = {'Status': 'Waiting'}
myDB.upsert('jobhistory', val, ctrl)
if jobid == 'rss':
mylar.CONFIG.ENABLE_RSS = True
elif jobid == 'monitor':
mylar.CONFIG.ENABLE_CHECK_FOLDER = True
helpers.job_management()
else:
logger.warn('%s cannot be matched against any scheduled jobs - maybe you should restart?' % job)
@ -3872,7 +3925,7 @@ class WebInterface(object):
return mylar.IMPORT_STATUS
Check_ImportStatus.exposed = True
def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_rename=0, imp_metadata=0, forcescan=0):
def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_paths=0, imp_rename=0, imp_metadata=0, forcescan=0):
import Queue
queue = Queue.Queue()
@ -3883,10 +3936,15 @@ class WebInterface(object):
# #to handle long paths, let's append the '\\?\' to the path to allow for unicode windows api access
# path = "\\\\?\\" + path
mylar.CONFIG.COMIC_DIR = path
mylar.CONFIG.IMP_MOVE = imp_move
mylar.CONFIG.IMP_RENAME = imp_rename
mylar.CONFIG.IMP_METADATA = imp_metadata
#mylar.config_write()
mylar.CONFIG.IMP_MOVE = bool(imp_move)
mylar.CONFIG.IMP_RENAME = bool(imp_rename)
mylar.CONFIG.IMP_METADATA = bool(imp_metadata)
mylar.CONFIG.IMP_PATHS = bool(imp_paths)
mylar.CONFIG.configure(update=True)
# Write the config
logger.info('Now updating config...')
mylar.CONFIG.writeconfig()
logger.info('forcescan is: ' + str(forcescan))
if mylar.IMPORTLOCK and forcescan == 1:
@ -4807,13 +4865,18 @@ class WebInterface(object):
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % comicid)
manual_annual_add.exposed = True
def comic_config(self, com_location, ComicID, alt_search=None, fuzzy_year=None, comic_version=None, force_continuing=None, alt_filename=None, allow_packs=None, corrected_seriesyear=None, torrentid_32p=None):
def comic_config(self, com_location, ComicID, alt_search=None, fuzzy_year=None, comic_version=None, force_continuing=None, force_type=None, alt_filename=None, allow_packs=None, corrected_seriesyear=None, torrentid_32p=None):
myDB = db.DBConnection()
chk1 = myDB.selectone('SELECT ComicLocation FROM comics WHERE ComicID=?', [ComicID]).fetchone()
if chk1 is None:
chk1 = myDB.selectone('SELECT ComicLocation, Corrected_Type FROM comics WHERE ComicID=?', [ComicID]).fetchone()
if chk1[0] is None:
orig_location = com_location
else:
orig_location = chk1['ComicLocation']
orig_location = chk1[0]
if chk1[1] is None:
orig_type = None
else:
orig_type = chk1[1]
#--- this is for multiple search terms............
#--- works, just need to redo search.py to accommodate multiple search terms
ffs_alt = []
@ -4837,7 +4900,7 @@ class WebInterface(object):
asearch = str(alt_search)
controlValueDict = {'ComicID': ComicID}
newValues = {"ComicLocation": com_location}
newValues = {}
if asearch is not None:
if re.sub(r'\s', '', asearch) == '':
newValues['AlternateSearch'] = "None"
@ -4869,6 +4932,21 @@ class WebInterface(object):
else:
newValues['ForceContinuing'] = 1
if force_type is not None:
newValues['Corrected_Type'] = 'TPB'
else:
newValues['Corrected_Type'] = None
if orig_type != force_type:
if '$Type' in mylar.CONFIG.FOLDER_FORMAT and com_location == orig_location:
#rename folder to accommodate new forced TPB format.
import filers
x = filers.FileHandlers(ComicID=ComicID)
newcom_location = x.folder_create(booktype=newValues['Corrected_Type'])
if newcom_location is not None:
com_location = newcom_location
if allow_packs is None:
newValues['AllowPacks'] = 0
else:
@ -4899,6 +4977,9 @@ class WebInterface(object):
if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
return
newValues['ComicLocation'] = com_location
myDB.upsert("comics", newValues, controlValueDict)
logger.fdebug('Updated Series options!')
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
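The comic_config() hunk above ties the new Force Type toggle to the $Type folder-format token: when the forced type changes and $Type is part of the folder format, filers.FileHandlers.folder_create() renames the existing series folder. That module is not shown in this diff; below is only a rough sketch of how a $Type token could resolve, using an invented format string and example values.

def resolve_folder_format(folder_format, series, year, booktype):
    # Substitute the supported tokens; $Type falls back to empty when no
    # book type is forced (illustrative token set, not Mylar's full list).
    values = {'$Series': series, '$Year': year, '$Type': booktype or ''}
    out = folder_format
    for token, value in values.items():
        out = out.replace(token, value)
    return out.strip()

# resolve_folder_format('$Series ($Year) [$Type]', 'Saga', '2012', 'TPB')
#   -> 'Saga (2012) [TPB]'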