FIX: (#746) updated autoProcessComics.py / ComicRN.py, which will now send proper completion messages to clients, FIX: (#752) refresh series will now test if a series is 'out of whack' with its numerical issue count (ie. 5/4) or it has no issue data due to a bad refresh / api maxing out and will adjust its processing to accommodate either, IMP: (#750) Added ComicVine API Checker which will check API counts at regular intervals to inform/warn users of usage, as well as adding a screen-time display of API hits / mins used at the bottom of every page (refreshing/reloading pages will update counts), FIX: (#747)EOL normalization (dos2unix) on search.py - removed classes & exceptions as not being used, IMP: (#747) Skip processing issues with an invalid store date & issue date (thnx rupaschomaker), FIX: Removed strings when searching/logging torrents as was causing ascii errors especially with KAT, IMP: Added [META-TAGGING] to logging for meta-tagging module, IMP: Added ability in GUI to select CR or Cbl tags (or both) when writing metadata to cbz files, IMP: Improved support/usage with ComicTagger v1.1.15 which allows for personal CV API Key usage - if supplied to Mylar, will use when tagging with ComicTagger, IMP: Added Manual Search option to allow for individual searches of issues without changing initial status.

This commit is contained in:
evilhero 2014-06-18 15:58:19 -04:00
parent 2ce158b2e3
commit c510860c46
19 changed files with 1957 additions and 1941 deletions

View File

@ -37,6 +37,7 @@ body { font:13px/1.231 sans-serif; *font-size:small; }
select, input, textarea, button { font:99% sans-serif; }
pre, code, kbd, samp { font-family: monospace, sans-serif; }
html { overflow-y: scroll; }
a:hover, a:active { outline: none; }
ul, ol { margin-left: 2em; }
@ -160,6 +161,7 @@ table#artist_table td#album { vertical-align: middle; text-align: left; min-widt
table#artist_table td#have { vertical-align: middle; }
div#paddingheader { padding-top: 48px; font-size: 24px; font-weight: bold; text-align: center; }
div#paddingheadertitle { padding-top: 24px; font-size: 24px; font-weight: bold; text-align: center; }
div#nopaddingheader { font-size: 24px; font-weight: bold; text-align: center; }
table#issue_table { background-color: grey; width: 100%; padding: 10px; }

View File

@ -1,6 +1,7 @@
<%
import mylar
from mylar import version
from mylar.helpers import cvapi_check
%>
<!doctype html>
<!--[if lt IE 7 ]> <html lang="en" class="no-js ie6"> <![endif]-->
@ -95,6 +96,8 @@
%if version.MYLAR_VERSION != 'master':
(${version.MYLAR_VERSION})
%endif
<br/>
API Usage: ${cvapi_check(True)}
</div>
</footer>
<a href="#main" id="toTop"><span>Back to top</span></a>

View File

@ -33,13 +33,12 @@
<img src="interfaces/default/images/loader_black.gif" alt="loading" style="float:left; margin-right: 5px;"/>
%endif
<div class="row">
<a href="${comic['DetailURL']}" target="_blank">${comic['ComicName']} (${comic['ComicYear']})</a>
<a href="${comic['DetailURL']}" target="_blank">${comic['ComicName']} (${comic['ComicYear']})</a>
%if comic['Status'] == 'Loading':
<h3><i>(Comic information is currently being loaded)</i></h3>
%endif
</div>
</h1>
</h1>
<div style="z-index:10; position: absolute; right: 0; top: 10;">
<a href="comicDetails?ComicID=${series['Previous']}"><img src="interfaces/default/images/prev.gif" width="16" height="18" /></a>
<a href="comicDetails?ComicID=${series['Next']}"><img src="interfaces/default/images/next.gif" width="16" height="18" /></a>
@ -363,12 +362,13 @@
%endif
</td>
<td id="options">
<a href="#" title="Manual Search" onclick="doAjaxCall('queueit?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&ComicYear=${issue['IssueDate']}&mode=want&manualsearch=True',$(this),'table')" data-success="Manually searching for ${comic['ComicName']} #${issue['Issue_Number']}"><img src="interfaces/default/images/search.png" height="25" width="25" /></a>
%if issue['Status'] == 'Skipped' or issue['Status'] == 'Ignored':
<a href="#" title="Mark issue as Wanted" onclick="doAjaxCall('queueissue?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&ComicYear=${issue['IssueDate']}&mode=want',$(this),'table')" data-success="Issue Marked as Wanted"><img src="interfaces/default/images/wanted_icon.png" height="25" width="25" /></a>
<a href="#" title="Mark issue as Wanted" onclick="doAjaxCall('queueit?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&ComicYear=${issue['IssueDate']}&mode=want',$(this),'table')" data-success="Issue Marked as Wanted"><img src="interfaces/default/images/wanted_icon.png" height="25" width="25" /></a>
%elif (issue['Status'] == 'Wanted'):
<a href="#" title="Mark issue as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${issue['IssueID']}&ComicID=${issue['ComicID']}',$(this),'table')" data-success="'${issue['Issue_Number']}' has been marked as skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" /></a>
%elif (issue['Status'] == 'Snatched'):
<a href="#" onclick="doAjaxCall('queueissue?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&mode=want', $(this),'table')" data-success="Retrying the same version of '${issue['ComicName']}' '${issue['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" /></a>
<a href="#" onclick="doAjaxCall('queueit?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&mode=want', $(this),'table')" data-success="Retrying the same version of '${issue['ComicName']}' '${issue['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" /></a>
<a href="#" title="Mark issue as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${issue['IssueID']}&ComicID=${issue['ComicID']}',$(this),'table')" data-success="'${issue['Issue_Number']}' has been marked as skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" /></a>
%elif (issue['Status'] == 'Downloaded'):
<%
@ -384,7 +384,7 @@
%endif
<a href="#" title="Add to Reading List" onclick="doAjaxCall('addtoreadlist?IssueID=${issue['IssueID']}',$(this),'table')" data-success="${issue['Issue_Number']} added to Reading List"><img src="interfaces/default/images/glasses-icon.png" height="25" width="25" /></a>
%else:
<a href="#" onclick="doAjaxCall('queueissue?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&mode=want', $(this),'table')" data-success="Retrying the same version of '${issue['ComicName']}' '${issue['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" /></a>
<a href="#" onclick="doAjaxCall('queueit?ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&ComicIssue=${issue['Issue_Number']}&mode=want', $(this),'table')" data-success="Retrying the same version of '${issue['ComicName']}' '${issue['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" /></a>
<a href="#" title="Mark issue as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${issue['IssueID']}&ComicID=${issue['ComicID']}',$(this),'table')" data-success="'${issue['Issue_Number']}' has been marked as skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" /></a>
%endif
<a href="#" onclick="doAjaxCall('archiveissue?IssueID=${issue['IssueID']}',$(this),'table')"><img src="interfaces/default/images/archive_icon.png" height="25" width="25" title="Mark issue as Archived"></a>
@ -479,6 +479,7 @@
</td>
<td id="aoptions">
<% amode = 'want_ann' %>
<a href="#" title="Manual Search" onclick="doAjaxCall('queueit?ComicID=${annual['ComicID']}&IssueID=${annual['IssueID']}&ComicIssue=${annual['Issue_Number']}&ComicYear=${annual['IssueDate']}&mode=${amode}&manualsearch=True',$(this),'table')" data-success="Manually searching for ${annual['ComicName']} #${annual['Issue_Number']}"><img src="interfaces/default/images/search.png" height="25" width="25" /></a>
<a href="#" title="Mark issue as Wanted" onclick="doAjaxCall('queueissue?ComicID=${annual['ComicID']}&IssueID=${annual['IssueID']}&ComicIssue=${annual['Issue_Number']}&ComicYear=${annual['IssueDate']}&mode=${amode}',$(this),'table')"><img src="interfaces/default/images/wanted_icon.png" height="25" width="25" /></a>
<a href="#" title="Mark issue as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${annual['IssueID']}&ComicID=${annual['ComicID']}',$(this),'table')" data-success="'${annual['Issue_Number']}' has been marked as skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" /></a>
<a href="#" title="Add to Reading List"><img src="interfaces/default/images/glasses-icon.png" height="25" width="25" /></a>
@ -543,7 +544,7 @@
data: {
resources: "volume",
format: "json",
api_key: "583939a3df0a25fc4e8b7a29934a13078002dc27",
api_key: "",
query: request.term
},
success: function( data ) {

View File

@ -609,10 +609,9 @@
</div>
</fieldset>
<fieldset>
<legend>Metadata Tagging</legend>
<legend>Metadata Tagging</legend><small class="heading"><span style="float: left; margin-right: .3em; margin-top: 4px;" class="ui-icon ui-icon-info"></span>ComicTagger and configparser are required</small>
<div class="row checkbox left clearfix">
<input id="enable_meta" type="checkbox" onclick="initConfigCheckbox($this));" name="enable_meta" value="1" ${config['enable_meta']} /><label>Enable Metadata Tagging</label>
<small class="heading"><span style="float: left; margin-right: .3em; margin-top: 4px;" class="ui-icon ui-icon-info"></span>You need to have ComicTagger and configparser installed</small>
</div>
<div class="config">
<div class="row">
@ -620,6 +619,16 @@
<input type="text" name="cmtagger_path" value="${config['cmtagger_path']}" size="30" />
<small>If left blank, will assume it's in root of mylar</small>
</div>
<div class="row checkbox left clearfix">
<input type="checkbox" name="ct_tag_cr" value="1" ${config['ct_tag_cr']} /><label>Write ComicRack (cr) tags (ComicInfo.xml)</label>
</div>
<div class="row checkbox left clearfix">
<input type="checkbox" name="ct_tag_cbl" value="1" ${config['ct_tag_cbl']} /><label>Write ComicBookLover (Cbl) tags (zip comment)</label>
</div>
<div>
<small><center>If ComicVine API Key specified, will use with ComicTagger</center></small>
<small><center>Writing each type of metadata will increase API count respectively</center></small>
</div>
</div>
</fieldset>

Binary file not shown.

After

Width:  |  Height:  |  Size: 321 B

View File

@ -92,6 +92,7 @@ HTTP_HOST = None
HTTP_USERNAME = None
HTTP_PASSWORD = None
HTTP_ROOT = None
HTTPS_FORCE_ON = False
API_ENABLED = False
API_KEY = None
LAUNCH_BROWSER = False
@ -106,6 +107,8 @@ SEARCH_DELAY = 1
COMICVINE_API = None
DEFAULT_CVAPI = '583939a3df0a25fc4e8b7a29934a13078002dc27'
CVAPI_COUNT = 0
CVAPI_TIME = None
CHECK_GITHUB = False
CHECK_GITHUB_ON_STARTUP = False
@ -264,6 +267,8 @@ BIGGIE_PUB = 55
ENABLE_META = 0
CMTAGGER_PATH = None
CT_TAG_CR = 1
CT_TAG_CBL = 1
ENABLE_RSS = 0
RSS_CHECKINTERVAL = 20
@ -338,15 +343,15 @@ def initialize():
with INIT_LOCK:
global __INITIALIZED__, COMICVINE_API, DEFAULT_CVAPI, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, \
global __INITIALIZED__, COMICVINE_API, DEFAULT_CVAPI, CVAPI_COUNT, CVAPI_TIME, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, HTTPS_FORCE_ON, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, \
CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, DESTINATION_DIR, \
DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_UID, DOGNZB_APIKEY, \
NEWZNAB, NEWZNAB_NAME, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_UID, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, ALTEXPERIMENTAL, \
ENABLE_META, CMTAGGER_PATH, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, PROVIDER_ORDER, \
ENABLE_META, CMTAGGER_PATH, CT_TAG_CR, CT_TAG_CBL, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, PROVIDER_ORDER, \
dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \
ENABLE_TORRENTS, MINSEEDS, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \
ENABLE_RSS, RSS_CHECKINTERVAL, RSS_LASTRUN, ENABLE_TORRENT_SEARCH, ENABLE_KAT, KAT_PROXY, ENABLE_CBT, CBT_PASSKEY, SNATCHEDTORRENT_NOTIFY, \
@ -385,6 +390,7 @@ def initialize():
HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '/')
HTTPS_FORCE_ON = bool(check_setting_int(CFG, 'General', 'https_force_on', 0))
API_ENABLED = bool(check_setting_int(CFG, 'General', 'api_enabled', 0))
API_KEY = check_setting_str(CFG, 'General', 'api_key', '')
LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
@ -512,6 +518,8 @@ def initialize():
ENABLE_META = bool(check_setting_int(CFG, 'General', 'enable_meta', 0))
CMTAGGER_PATH = check_setting_str(CFG, 'General', 'cmtagger_path', '')
CT_TAG_CR = bool(check_setting_int(CFG, 'General', 'ct_tag_cr', 1))
CT_TAG_CBL = bool(check_setting_int(CFG, 'General', 'ct_tag_cbl', 1))
INDIE_PUB = check_setting_str(CFG, 'General', 'indie_pub', '75')
BIGGIE_PUB = check_setting_str(CFG, 'General', 'biggie_pub', '55')
@ -1015,6 +1023,7 @@ def config_write():
new_config['General']['http_username'] = HTTP_USERNAME
new_config['General']['http_password'] = HTTP_PASSWORD
new_config['General']['http_root'] = HTTP_ROOT
new_config['General']['https_force_on'] = int(HTTPS_FORCE_ON)
new_config['General']['api_enabled'] = int(API_ENABLED)
new_config['General']['api_key'] = API_KEY
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
@ -1089,6 +1098,8 @@ def config_write():
new_config['General']['fftonewcom_dir'] = int(FFTONEWCOM_DIR)
new_config['General']['enable_meta'] = int(ENABLE_META)
new_config['General']['cmtagger_path'] = CMTAGGER_PATH
new_config['General']['ct_tag_cr'] = int(CT_TAG_CR)
new_config['General']['ct_tag_cbl'] = int(CT_TAG_CBL)
new_config['General']['indie_pub'] = INDIE_PUB
new_config['General']['biggie_pub'] = BIGGIE_PUB
@ -1238,6 +1249,10 @@ def start():
helpers.latestdate_fix()
#start the ComicVine API Counter here.
logger.info('Initiating the ComicVine API Checker to report API hits every 5 minutes.')
SCHED.add_interval_job(helpers.cvapi_check, minutes=5)
#initiate startup rss feeds for torrents/nzbs here...
if ENABLE_RSS:
SCHED.add_interval_job(rsscheck.tehMain, minutes=int(RSS_CHECKINTERVAL))
@ -1263,10 +1278,10 @@ def start():
SCHED.add_interval_job(versioncheck.checkGithub, minutes=CHECK_GITHUB_INTERVAL)
#run checkFolder every X minutes (basically Manual Run Post-Processing)
logger.info('CHECK_FOLDER SET TO: ' + str(CHECK_FOLDER))
logger.info('Monitor folder set to : ' + str(CHECK_FOLDER))
if CHECK_FOLDER:
if DOWNLOAD_SCAN_INTERVAL >0:
logger.info('Setting monitor on folder : ' + str(CHECK_FOLDER))
logger.info('Enabling folder monitor for : ' + str(CHECK_FOLDER) + ' every ' + str(DOWNLOAD_SCAN_INTERVAL) + ' minutes.')
#FolderMonitorScheduler.thread.start()
SCHED.add_interval_job(helpers.checkFolder, minutes=int(DOWNLOAD_SCAN_INTERVAL))
else:

View File

@ -1,130 +0,0 @@
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
#########################################
## Stolen from Sick-Beard's classes.py ##
#########################################
import mylar
import urllib
import datetime
from common import USER_AGENT
class mylarURLopener(urllib.FancyURLopener):
    """URL opener that identifies itself with Mylar's User-Agent string."""
    # FancyURLopener sends this value as the User-Agent header on requests.
    version = USER_AGENT
class AuthURLOpener(mylarURLopener):
    """
    URLOpener that performs HTTP basic auth without interactive password entry.

    The username/password supplied at construction time are offered on the
    first authentication challenge; any further challenge cancels the request
    instead of prompting the user (i.e. bad credentials simply fail).
    """

    def __init__(self, user, pw):
        # Credentials to present on the first HTTP auth challenge.
        self.username = user
        self.password = pw
        # Tracks whether the credentials were already offered once.
        self.numTries = 0
        # Call the base class initializer.
        urllib.FancyURLopener.__init__(self)

    def prompt_user_passwd(self, host, realm):
        """
        Override the interactive prompt: hand back the stored credentials
        on the first attempt, and blanks (which cancels) thereafter.
        """
        if self.numTries != 0:
            # Already tried once -- returning blanks cancels the request.
            return ('', '')
        self.numTries = 1
        return (self.username, self.password)

    def openit(self, url):
        """Convenience wrapper: reset the retry counter, then open *url*."""
        self.numTries = 0
        return mylarURLopener.open(self, url)
class SearchResult:
    """
    Represents a single search result from an indexer/provider.

    Attributes are populated by the caller after construction; ``__str__``
    renders a human-readable summary for logging/debugging.
    """

    def __init__(self):
        # Provider object that produced this result; -1 until assigned.
        self.provider = -1
        # URL to the NZB/torrent file.
        self.url = ""
        # Used by some providers to store extra info associated with the result.
        self.extraInfo = []
        # Quality of the release; -1 means unknown.
        self.quality = -1
        # Release name.
        self.name = ""

    def __str__(self):
        # PEP 8: comparison against the None singleton must use identity
        # ('is None'), not equality -- the original used '== None'.
        if self.provider is None:
            return "Invalid provider, unable to print self"
        myString = self.provider.name + " @ " + self.url + "\n"
        myString += "Extra Info:\n"
        for extra in self.extraInfo:
            myString += " " + extra + "\n"
        return myString
class NZBSearchResult(SearchResult):
    """Regular NZB result: ``url`` points directly at the NZB file."""
    resultType = "nzb"
class NZBDataSearchResult(SearchResult):
    """NZB result whose raw NZB XML payload is carried in ``extraInfo``."""
    resultType = "nzbdata"
class TorrentSearchResult(SearchResult):
    """Torrent result: ``url`` points at the .torrent file."""
    resultType = "torrent"
class Proper:
    """
    Lightweight record describing a 'proper' (re-release) candidate.

    Only name/url/date are known at construction; the remaining fields
    default to "unknown" sentinels (-1, or None for the provider) and are
    filled in later by the search pipeline.
    """

    def __init__(self, name, url, date):
        self.name = name
        self.url = url
        self.date = date
        # Unknown until the search pipeline assigns them.
        self.provider = None
        self.quality = -1
        self.tvdbid = -1
        self.season = -1
        self.episode = -1

    def __str__(self):
        # Join the same pieces the original concatenated with '+'; note that
        # self.name is used as-is (must already be a string), matching the
        # original's behavior exactly.
        pieces = [
            str(self.date), " ", self.name, " ",
            str(self.season), "x", str(self.episode),
            " of ", str(self.tvdbid),
        ]
        return "".join(pieces)

View File

@ -15,9 +15,10 @@ from subprocess import CalledProcessError, check_output
import mylar
from mylar import logger
from mylar.helpers import cvapi_check
def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
logger.fdebug('dirName:' + dirName)
logger.fdebug('[META-TAGGING] dirName:' + dirName)
## Set the directory in which comictagger and other external commands are located - IMPORTANT - ##
# ( User may have to modify, depending on their setup, but these are some guesses for now )
@ -40,8 +41,8 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
if not os.path.isfile(unrar_cmd):
unrar_cmd = "C:\Program Files (x86)\WinRAR\UnRAR.exe"
if not os.path.isfile(unrar_cmd):
logger.fdebug('Unable to locate UnRAR.exe - make sure it is installed.')
logger.fdebug('Aborting meta-tagging.')
logger.fdebug('[META-TAGGING] Unable to locate UnRAR.exe - make sure it is installed.')
logger.fdebug('[META-TAGGING] Aborting meta-tagging.')
return "fail"
@ -55,8 +56,8 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
try:
import configparser
except ImportError:
logger.fdebug('configparser not found on system. Please install manually in order to write metadata')
logger.fdebug('continuing with PostProcessing, but I am not using metadata.')
logger.fdebug('[META-TAGGING] configparser not found on system. Please install manually in order to write metadata')
logger.fdebug('[META-TAGGING] continuing with PostProcessing, but I am not using metadata.')
return "fail"
#set this to the lib path (ie. '<root of mylar>/lib')
@ -71,9 +72,9 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
file_conversion = True
file_extension_fixing = True
if not os.path.exists( unrar_cmd ):
logger.fdebug('WARNING: cannot find the unrar command.')
logger.fdebug('File conversion and extension fixing not available')
logger.fdebug('You probably need to edit this script, or install the missing tool, or both!')
logger.fdebug('[META-TAGGING] WARNING: cannot find the unrar command.')
logger.fdebug('[META-TAGGING] File conversion and extension fixing not available')
logger.fdebug('[META-TAGGING] You probably need to edit this script, or install the missing tool, or both!')
file_conversion = False
file_extension_fixing = False
@ -88,32 +89,32 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
comicpath = os.path.join( downloadpath, issueid )
unrar_folder = os.path.join( comicpath , "unrard" )
logger.fdebug('---directory settings.')
logger.fdebug('scriptname : ' + scriptname)
logger.fdebug('downloadpath : ' + downloadpath)
logger.fdebug('sabnzbdscriptpath : ' + sabnzbdscriptpath)
logger.fdebug('comicpath : ' + comicpath)
logger.fdebug('unrar_folder : ' + unrar_folder)
logger.fdebug('Running the Post-SabNZBd/Mylar script')
logger.fdebug('[META-TAGGING] ---directory settings.')
logger.fdebug('[META-TAGGING] scriptname : ' + scriptname)
logger.fdebug('[META-TAGGING] downloadpath : ' + downloadpath)
logger.fdebug('[META-TAGGING] sabnzbdscriptpath : ' + sabnzbdscriptpath)
logger.fdebug('[META-TAGGING] comicpath : ' + comicpath)
logger.fdebug('[META-TAGGING] unrar_folder : ' + unrar_folder)
logger.fdebug('[META-TAGGING] Running the Post-SabNZBd/Mylar script')
if os.path.exists( comicpath ):
shutil.rmtree( comicpath )
logger.fdebug('attempting to create directory @: ' + str(comicpath))
logger.fdebug('[META-TAGGING] Attempting to create directory @: ' + str(comicpath))
try:
os.makedirs(comicpath)
except OSError:
raise
logger.fdebug('created directory @ : ' + str(comicpath))
logger.fdebug('filename is : ' + str(filename))
logger.fdebug('[META-TAGGING] Created directory @ : ' + str(comicpath))
logger.fdebug('[META-TAGGING] Filename is : ' + str(filename))
if filename is None:
filename_list = glob.glob( os.path.join( downloadpath, "*.cbz" ) )
filename_list.extend( glob.glob( os.path.join( downloadpath, "*.cbr" ) ) )
fcount = 1
for f in filename_list:
if fcount > 1:
logger.fdebug('More than one cbr/cbz within path, performing Post-Process on first file detected: ' + f)
logger.fdebug('[META-TAGGING] More than one cbr/cbz within path, performing Post-Process on first file detected: ' + f)
break
shutil.move( f, comicpath )
filename = f #just the filename itself
@ -128,10 +129,10 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
if filename.endswith('.cbr'):
f = os.path.join( comicpath, filename )
if zipfile.is_zipfile( f ):
logger.fdebug('zipfile detected')
logger.fdebug('[META-TAGGING] zipfile detected')
base = os.path.splitext( f )[0]
shutil.move( f, base + ".cbz" )
logger.fdebug('{0}: renaming {1} to be a cbz'.format( scriptname, os.path.basename( f ) ))
logger.fdebug('[META-TAGGING] {0}: renaming {1} to be a cbz'.format( scriptname, os.path.basename( f ) ))
if file_extension_fixing:
if filename.endswith('.cbz'):
@ -146,7 +147,7 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
if not "is not RAR archive" in rar_test_cmd_output:
base = os.path.splitext( f )[0]
shutil.move( f, base + ".cbr" )
logger.fdebug('{0}: renaming {1} to be a cbr'.format( scriptname, os.path.basename( f ) ))
logger.fdebug('[META-TAGGING] {0}: renaming {1} to be a cbr'.format( scriptname, os.path.basename( f ) ))
# Now rename all CBR files to RAR
if filename.endswith('.cbr'):
@ -159,7 +160,7 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
## Changes any cbr files to cbz files for insertion of metadata ##
if file_conversion:
f = os.path.join( comicpath, filename )
logger.fdebug('{0}: converting {1} to be zip format'.format( scriptname, os.path.basename( f ) ))
logger.fdebug('[META-TAGGING] {0}: converting {1} to be zip format'.format( scriptname, os.path.basename( f ) ))
basename = os.path.splitext( f )[0]
zipname = basename + ".cbz"
@ -168,17 +169,17 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
os.chdir( unrar_folder )
# Extract and zip up
logger.fdebug('{0}: Comicpath is ' + baserar) #os.path.join(comicpath,basename))
logger.fdebug('{0}: Unrar is ' + unrar_folder )
logger.fdebug('[META-TAGGING] {0}: Comicpath is ' + baserar) #os.path.join(comicpath,basename))
logger.fdebug('[META-TAGGING] {0}: Unrar is ' + unrar_folder )
try:
#subprocess.Popen( [ unrar_cmd, "x", os.path.join(comicpath,basename) ] ).communicate()
output = subprocess.check_output( [ unrar_cmd, 'x', baserar ] ) #os.path.join(comicpath,basename) ] )
except CalledProcessError as e:
if e.returncode == 3:
logger.fdebug('[Unrar Error 3] - Broken Archive.')
logger.fdebug('[META-TAGGING] [Unrar Error 3] - Broken Archive.')
elif e.returncode == 1:
logger.fdebug('[Unrar Error 1] - No files to extract.')
logger.fdebug('Marking this as an incomplete download.')
logger.fdebug('[META-TAGGING] [Unrar Error 1] - No files to extract.')
logger.fdebug('[META-TAGGING] Marking this as an incomplete download.')
return "unrar error"
shutil.make_archive( basename, "zip", unrar_folder )
@ -194,27 +195,27 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
try:
with open(f): pass
except:
logger.fdebug('No zip file present')
logger.warn('[META-TAGGING] No zip file present')
return "fail"
base = os.path.splitext( f )[0]
shutil.move( f, base + ".cbz" )
nfilename = base + ".cbz"
else:
logger.fdebug('filename:' + filename)
logger.fdebug('[META-TAGGING] Filename:' + filename)
nfilename = filename
if os.path.isfile( nfilename ):
logger.fdebug('file exists in given location already.')
logger.fdebug('[META-TAGGING] File exists in given location already.')
file_dir, file_n = os.path.split(nfilename)
else:
#remove the IssueID from the path
file_dir = re.sub(issueid, '', comicpath)
file_n = os.path.split(nfilename)[1]
logger.fdebug('converted directory: ' + str(file_dir))
logger.fdebug('converted filename: ' + str(file_n))
logger.fdebug('destination path: ' + os.path.join(dirName,file_n))
logger.fdebug('dirName: ' + dirName)
logger.fdebug('absDirName: ' + os.path.abspath(dirName))
logger.fdebug('[META-TAGGING] Converted directory: ' + str(file_dir))
logger.fdebug('[META-TAGGING] Converted filename: ' + str(file_n))
logger.fdebug('[META-TAGGING] Destination path: ' + os.path.join(dirName,file_n))
logger.fdebug('[META-TAGGING] dirName: ' + dirName)
logger.fdebug('[META-TAGGING] absDirName: ' + os.path.abspath(dirName))
## check comictagger version - less than 1.15.beta - take your chances.
ctversion = subprocess.check_output( [ comictagger_cmd, "--version" ] )
ctend = ctversion.find(':')
@ -222,45 +223,75 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
ctcheck = re.sub('\.', '', ctcheck).strip()
if int(ctcheck) >= int('1115'): #(v1.1.15)
if mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
logger.fdebug(ctversion[:ctend] + ' being used - no personal ComicVine API Key supplied. Take your chances.')
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - no personal ComicVine API Key supplied. Take your chances.')
use_cvapi = "False"
else:
logger.fdebug(ctversion[:ctend] + ' being used - using personal ComicVine API key supplied via mylar.')
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - using personal ComicVine API key supplied via mylar.')
use_cvapi = "True"
else:
logger.fdebug(ctversion[:ctend] + ' being used - personal ComicVine API key not supported in this version. Good luck.')
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - personal ComicVine API key not supported in this version. Good luck.')
use_cvapi = "False"
if use_cvapi == "True":
## Tag each CBZ, and move it back to original directory ##
if issueid is None:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cr", "--cv-api-key", mylar.COMICVINE_API, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cbl", "--cv-api-key", mylar.COMICVINE_API, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
else:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cr", "--cv-api-key", mylar.COMICVINE_API, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cbl", "--cv-api-key", mylar.COMICVINE_API, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
else:
if issueid is None:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cr", "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cbl", "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
else:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cr", "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
subprocess.Popen( [ comictagger_cmd, "-s", "-t", "cbl", "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
i = 1
tagcnt = 0
if mylar.CT_TAG_CR:
tagcnt = 1
logger.info('[META-TAGGING] CR Tagging enabled.')
if mylar.CT_TAG_CBL:
if not mylar.CT_TAG_CR: i = 2 #set the tag to start at cbl and end without doing another tagging.
tagcnt = 2
logger.info('[META-TAGGING] CBL Tagging enabled.')
if tagcnt == 0:
logger.warn('[META-TAGGING] You have metatagging enabled, but you have not selected the type(s) of metadata to write. Please fix and re-run manually')
return "fail"
while ( i <= tagcnt ):
if i == 1:
tagtype = "cr" # CR meta-tagging cycle.
tagdisp = 'ComicRack tagging'
elif i == 2:
tagtype = "cbl" #Cbl meta-tagging cycle
tagdisp = 'Comicbooklover tagging'
logger.info('[META-TAGGING] ' + tagdisp + ' meta-tagging processing started.')
#CV API Check here.
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
cvapi_check()
## Tag each CBZ, and move it back to original directory ##
if use_cvapi == "True":
if issueid is None:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
else:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
logger.info('[META-TAGGING] ' + tagtype + ' meta-tagging complete')
#increment CV API counter.
mylar.CVAPI_COUNT +=1
else:
if issueid is None:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
else:
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
#increment CV API counter.
mylar.CVAPI_COUNT +=1
i+=1
if os.path.exists(os.path.join(os.path.abspath(dirName),file_n)):
logger.fdebug('Unable to move - file already exists.')
logger.fdebug('[META-TAGGING] Unable to move - file already exists.')
else:
shutil.move( os.path.join(comicpath, nfilename), os.path.join(os.path.abspath(dirName),file_n))
#shutil.move( nfilename, os.path.join(os.path.abspath(dirName),file_n))
logger.fdebug('Sucessfully moved file from temporary path.')
logger.fdebug('[META-TAGGING] Sucessfully moved file from temporary path.')
i = 0
os.chdir( mylar.PROG_DIR )
while i < 10:
try:
logger.fdebug('Attempting to remove: ' + comicpath)
logger.fdebug('[META-TAGGING] Attempting to remove: ' + comicpath)
shutil.rmtree( comicpath )
except:
time.sleep(.1)
@ -268,7 +299,7 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
return os.path.join(os.path.abspath(dirName), file_n)
i+=1
logger.fdebug('Failed to remove temporary path : ' + str(comicpath))
logger.fdebug('[META-TAGGING] Failed to remove temporary path : ' + str(comicpath))
return os.path.join(os.path.abspath(dirName),file_n)

View File

@ -21,6 +21,8 @@ import string
import urllib
import lib.feedparser
import mylar
from mylar.helpers import cvapi_check
from bs4 import BeautifulSoup as Soup
def pulldetails(comicid,type,issueid=None,offset=1):
@ -52,9 +54,13 @@ def pulldetails(comicid,type,issueid=None,offset=1):
elif type == 'storyarc':
PULLURL = mylar.CVURL + 'story_arc/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
#CV API Check here.
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
cvapi_check()
#download the file:
file = urllib2.urlopen(PULLURL)
#increment CV API counter.
mylar.CVAPI_COUNT +=1
#convert to string:
data = file.read()
#close file because we dont need it anymore:

View File

@ -1,41 +0,0 @@
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
def ex(e):
    """
    Returns a string from the exception text if it exists.
    """
    # An exception with no args, or with a falsy first arg (None, "", 0),
    # carries no usable message text.
    if not e.args or not e.args[0]:
        return ""
    # The first positional argument is the message; it is returned as-is
    # (callers historically receive whatever object was raised with).
    return e.args[0]
class mylarException(Exception):
    """Generic mylar Exception - should never be thrown, only subclassed"""
    pass


class NewzbinAPIThrottled(mylarException):
    """Newzbin has throttled us, deal with it"""
    pass

View File

@ -978,15 +978,30 @@ def LoadAlternateSearchNames(seriesname_alt, comicid):
return Alternate_Names
def havetotals():
def havetotals(refreshit=None):
import db, logger
comics = []
myDB = db.DBConnection()
comiclist = myDB.select('SELECT * from comics order by ComicSortName COLLATE NOCASE')
if refreshit is None:
comiclist = myDB.select('SELECT * from comics order by ComicSortName COLLATE NOCASE')
else:
comiclist = []
comicref = myDB.selectone("SELECT * from comics WHERE ComicID=?", [refreshit]).fetchone()
#refreshit is the ComicID passed from the Refresh Series to force/check numerical have totals
comiclist.append({"ComicID": comicref[0],
"Have": comicref[7],
"Total": comicref[8]})
for comic in comiclist:
issue = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comic['ComicID']])
issue = myDB.selectone("SELECT COUNT(*) as count FROM issues WHERE ComicID=?", [comic['ComicID']]).fetchone()
if issue is None:
if refreshit is not None:
logger.fdebug(str(comic['ComicID']) + ' has no issuedata available. Forcing complete Refresh/Rescan')
return True
else:
continue
if mylar.ANNUALS_ON:
annuals_on = True
annual = myDB.selectone("SELECT COUNT(*) as count FROM annuals WHERE ComicID=?", [comic['ComicID']]).fetchone()
@ -1007,7 +1022,13 @@ def havetotals():
continue
if not haveissues:
havetracks = 0
havetracks = 0
if refreshit is not None:
if haveissues > totalissues:
return True # if it's 5/4, send back to updater and don't restore previous status'
else:
return False # if it's 5/5 or 4/5, send back to updater and restore previous status'
try:
percent = (haveissues*100.0)/totalissues
@ -1051,6 +1072,38 @@ def havetotals():
return comics
def cvapi_check(web=None):
    """Track ComicVine API usage over a rolling ~15-minute window.

    Keeps a module-level counter (mylar.CVAPI_COUNT) and window start time
    (mylar.CVAPI_TIME). While inside the window it reports the current hit
    count and warns when the 200-hit limit has been exceeded; once the
    window has elapsed it resets both the counter and the start time.

    Parameters:
        web: None when called from background processing (log output is
            emitted); any other value means the caller is the web UI, in
            which case logging is suppressed and a summary string is
            returned instead.

    Returns:
        None when web is None; otherwise a display string of the form
        "<count> hits / <mins> minutes".
    """
    import logger
    if web is None: logger.fdebug('[ComicVine API] ComicVine API Check Running...')
    # Establish the monitoring window start on first use; otherwise reuse it.
    if mylar.CVAPI_TIME is None:
        c_date = now()
        c_obj_date = datetime.datetime.strptime(c_date,"%Y-%m-%d %H:%M:%S")
        mylar.CVAPI_TIME = c_obj_date
    else:
        c_obj_date = mylar.CVAPI_TIME
    if web is None: logger.fdebug('[ComicVine API] API Start Monitoring Time (~15mins): ' + str(mylar.CVAPI_TIME))
    now_date = now()
    n_date = datetime.datetime.strptime(now_date,"%Y-%m-%d %H:%M:%S")
    if web is None: logger.fdebug('[ComicVine API] Time now: ' + str(n_date))
    # Elapsed minutes since the window opened, rounded for display.
    absdiff = abs(n_date - c_obj_date)
    mins = round(((absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 60.0),2)
    if mins < 15:
        if web is None: logger.info('[ComicVine API] Comicvine API count now at : ' + str(mylar.CVAPI_COUNT) + ' in ' + str(mins) + ' minutes.')
        if mylar.CVAPI_COUNT > 200:
            cvleft = 15 - mins
            if web is None: logger.warn('[ComicVine API] You have already hit your API limit with ' + str(cvleft) + ' minutes. Best be slowing down, cowboy.')
    else:
        # Window elapsed: reset the counter and restart the window.
        # (Was `elif mins > 15`, which left mins == 15.0 exactly unhandled —
        # neither reporting nor resetting; `else` covers that boundary.)
        mylar.CVAPI_COUNT = 0
        c_date = now()
        mylar.CVAPI_TIME = datetime.datetime.strptime(c_date,"%Y-%m-%d %H:%M:%S")
        if web is None: logger.info('[ComicVine API] 15 minute API interval resetting [' + str(mylar.CVAPI_TIME) + ']. Resetting API count to : ' + str(mylar.CVAPI_COUNT))
    if web is None:
        return
    else:
        line = str(mylar.CVAPI_COUNT) + ' hits / ' + str(mins) + ' minutes'
        return line
from threading import Thread
class ThreadWithReturnValue(Thread):

View File

@ -22,7 +22,7 @@ from xml.dom.minidom import parseString, Element
import mylar
from mylar import logger, db, cv
from mylar.helpers import multikeysort, replace_all, cleanName
from mylar.helpers import multikeysort, replace_all, cleanName, cvapi_check
mb_lock = threading.Lock()
@ -39,6 +39,9 @@ def pullsearch(comicapi,comicquery,offset,explicit):
PULLURL = mylar.CVURL + 'volumes?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page
#all these imports are standard on most modern python implementations
#CV API Check here.
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
cvapi_check()
#download the file:
try:
file = urllib2.urlopen(PULLURL)
@ -46,6 +49,8 @@ def pullsearch(comicapi,comicquery,offset,explicit):
logger.error('err : ' + str(err))
logger.error("There was a major problem retrieving data from ComicVine - on their end. You'll have to try again later most likely.")
return
#increment CV API counter.
mylar.CVAPI_COUNT +=1
#convert to string:
data = file.read()
#close file because we dont need it anymore:

View File

@ -531,20 +531,20 @@ def torrentdbsearch(seriesname,issue,comicid=None,nzbprov=None):
#0 holds the title/issue and format-type.
while (i < len(torsplit)):
#we'll rebuild the string here so that it's formatted accordingly to be passed back to the parser.
logger.fdebug('section(' + str(i) + '): ' + str(torsplit[i]))
logger.fdebug('section(' + str(i) + '): ' + torsplit[i])
#remove extensions
titletemp = torsplit[i]
titletemp = re.sub('cbr', '', str(titletemp))
titletemp = re.sub('cbz', '', str(titletemp))
titletemp = re.sub('none', '', str(titletemp))
titletemp = re.sub('cbr', '', titletemp)
titletemp = re.sub('cbz', '', titletemp)
titletemp = re.sub('none', '', titletemp)
if i == 0:
rebuiltline = str(titletemp)
rebuiltline = titletemp
else:
rebuiltline = rebuiltline + ' (' + str(titletemp) + ')'
rebuiltline = rebuiltline + ' (' + titletemp + ')'
i+=1
logger.fdebug('rebuiltline is :' + str(rebuiltline))
logger.fdebug('rebuiltline is :' + rebuiltline)
seriesname_mod = seriesname
foundname_mod = torsplit[0]
@ -575,10 +575,10 @@ def torrentdbsearch(seriesname,issue,comicid=None,nzbprov=None):
titleend = formatrem_torsplit[len(formatrem_seriesname):]
titleend = re.sub('\-', '', titleend) #remove the '-' which is unnecessary
#remove extensions
titleend = re.sub('cbr', '', str(titleend))
titleend = re.sub('cbz', '', str(titleend))
titleend = re.sub('none', '', str(titleend))
logger.fdebug('titleend: ' + str(titleend))
titleend = re.sub('cbr', '', titleend)
titleend = re.sub('cbz', '', titleend)
titleend = re.sub('none', '', titleend)
logger.fdebug('titleend: ' + titleend)
sptitle = titleend.split()
extra = ''

File diff suppressed because it is too large Load Diff

View File

@ -59,6 +59,16 @@ def dbUpdate(ComicIDList=None):
logger.fdebug("Gathering the status of all issues for the series.")
issues = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
if not issues:
#if issues are None it's probably a bad refresh/maxed out API that resulted in the issue data
#getting wiped out and not refreshed. Setting whack=True will force a complete refresh.
logger.info('No issue data available. This is Whack.')
whack = True
else:
#check for series that are numerically out of whack (ie. 5/4)
logger.info('Checking how out of whack the series is.')
whack = helpers.havetotals(refreshit=ComicID)
annload = [] #initiate the list here so we don't error out below.
if mylar.ANNUALS_ON:
@ -82,85 +92,90 @@ def dbUpdate(ComicIDList=None):
myDB.action('DELETE FROM issues WHERE ComicID=?', [ComicID])
myDB.action('DELETE FROM annuals WHERE ComicID=?', [ComicID])
logger.fdebug("Refreshing the series and pulling in new data using only CV.")
mylar.importer.addComictoDB(ComicID,mismatch,calledfrom='dbupdate',annload=annload)
#reload the annuals here.
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
annuals = []
ann_list = []
if mylar.ANNUALS_ON:
annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
ann_list += annuals_list
issues_new += annuals_list
if whack == False:
mylar.importer.addComictoDB(ComicID,mismatch,calledfrom='dbupdate',annload=annload)
#reload the annuals here.
logger.fdebug("Attempting to put the Status' back how they were.")
icount = 0
#the problem - the loop below will not match on NEW issues that have been refreshed that weren't present in the
#db before (ie. you left Mylar off for a bit, and when you started it up it pulled down new issue information)
#need to test if issuenew['Status'] is None, but in a separate loop below.
fndissue = []
for issue in issues:
for issuenew in issues_new:
#logger.fdebug(str(issue['Issue_Number']) + ' - issuenew:' + str(issuenew['IssueID']) + ' : ' + str(issuenew['Status']))
#logger.fdebug(str(issue['Issue_Number']) + ' - issue:' + str(issue['IssueID']) + ' : ' + str(issue['Status']))
if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
ctrlVAL = {"IssueID": issue['IssueID']}
#if the status is None and the original status is either Downloaded / Archived, keep status & stats
if issuenew['Status'] == None and (issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived'):
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize'],
"Status": issue['Status']}
#if the status is now Downloaded/Snatched, keep status & stats (downloaded only)
elif issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
if issuenew['Status'] == 'Downloaded':
newVAL['Status'] = issuenew['Status']
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
annuals = []
ann_list = []
if mylar.ANNUALS_ON:
annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
ann_list += annuals_list
issues_new += annuals_list
logger.fdebug("Attempting to put the Status' back how they were.")
icount = 0
#the problem - the loop below will not match on NEW issues that have been refreshed that weren't present in the
#db before (ie. you left Mylar off for a bit, and when you started it up it pulled down new issue information)
#need to test if issuenew['Status'] is None, but in a separate loop below.
fndissue = []
for issue in issues:
for issuenew in issues_new:
#logger.fdebug(str(issue['Issue_Number']) + ' - issuenew:' + str(issuenew['IssueID']) + ' : ' + str(issuenew['Status']))
#logger.fdebug(str(issue['Issue_Number']) + ' - issue:' + str(issue['IssueID']) + ' : ' + str(issue['Status']))
if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
ctrlVAL = {"IssueID": issue['IssueID']}
#if the status is None and the original status is either Downloaded / Archived, keep status & stats
if issuenew['Status'] == None and (issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived'):
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize'],
"Status": issue['Status']}
#if the status is now Downloaded/Snatched, keep status & stats (downloaded only)
elif issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
if issuenew['Status'] == 'Downloaded':
newVAL['Status'] = issuenew['Status']
else:
newVAL['Status'] = issue['Status']
elif issue['Status'] == 'Archived':
newVAL = {"Status": issue['Status'],
"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
else:
newVAL['Status'] = issue['Status']
#change the status to the previous status
newVAL = {"Status": issue['Status']}
elif issue['Status'] == 'Archived':
newVAL = {"Status": issue['Status'],
"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
else:
#change the status to the previous status
newVAL = {"Status": issue['Status']}
if newVAL['Status'] == None:
newVAL = {"Status": "Skipped"}
if newVAL['Status'] == None:
newVAL = {"Status": "Skipped"}
if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):
#logger.fdebug("annual detected for " + str(issue['IssueID']) + " #: " + str(issue['Issue_Number']))
myDB.upsert("Annuals", newVAL, ctrlVAL)
else:
#logger.fdebug('#' + str(issue['Issue_Number']) + ' writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
fndissue.append({"IssueID": issue['IssueID']})
icount+=1
break
logger.info("In the process of converting the data to CV, I changed the status of " + str(icount) + " issues.")
if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):
#logger.fdebug("annual detected for " + str(issue['IssueID']) + " #: " + str(issue['Issue_Number']))
myDB.upsert("Annuals", newVAL, ctrlVAL)
else:
#logger.fdebug('#' + str(issue['Issue_Number']) + ' writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
fndissue.append({"IssueID": issue['IssueID']})
icount+=1
break
logger.info("In the process of converting the data to CV, I changed the status of " + str(icount) + " issues.")
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=? AND Status is NULL', [ComicID])
if mylar.ANNUALS_ON:
issues_new += myDB.select('SELECT * FROM annuals WHERE ComicID=? AND Status is NULL', [ComicID])
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=? AND Status is NULL', [ComicID])
if mylar.ANNUALS_ON:
issues_new += myDB.select('SELECT * FROM annuals WHERE ComicID=? AND Status is NULL', [ComicID])
newiss = []
if mylar.AUTOWANT_UPCOMING:
newstatus = "Wanted"
else:
newstatus = "Skipped"
for iss in issues_new:
newiss.append({"IssueID": iss['IssueID'],
newiss = []
if mylar.AUTOWANT_UPCOMING:
newstatus = "Wanted"
else:
newstatus = "Skipped"
for iss in issues_new:
newiss.append({"IssueID": iss['IssueID'],
"Status": newstatus})
if len(newiss) > 0:
for newi in newiss:
ctrlVAL = {"IssueID": newi['IssueID']}
newVAL = {"Status": newi['Status']}
#logger.fdebug('writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
if len(newiss) > 0:
for newi in newiss:
ctrlVAL = {"IssueID": newi['IssueID']}
newVAL = {"Status": newi['Status']}
#logger.fdebug('writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
logger.info('I have added ' + str(len(newiss)) + ' new issues for this series that were not present before.')
logger.info('I have added ' + str(len(newiss)) + ' new issues for this series that were not present before.')
else:
mylar.importer.addComictoDB(ComicID,mismatch,annload=annload)
else:
mylar.importer.addComictoDB(ComicID,mismatch)

View File

@ -52,6 +52,16 @@ def serve_template(templatename, **kwargs):
class WebInterface(object):
# def filter_request():
# request = cherrypy.request
# if mylar.HTTPS_FORCE_ON:
# request.base = request.base.replace('http://', 'https://')
# cherrypy.tools.filter_request = cherrypy.Tool('before_request_body', filter_request)
# _cp_config = { 'tools.filter_reqeust_on': True }
def index(self):
if mylar.SAFESTART:
raise cherrypy.HTTPRedirect("manageComics")
@ -473,9 +483,21 @@ class WebInterface(object):
else:
if mylar.CV_ONETIMER == 1:
logger.fdebug("CV_OneTimer option enabled...")
#in order to update to JUST CV_ONLY, we need to delete the issues for a given series so it's a clea$
#in order to update to JUST CV_ONLY, we need to delete the issues for a given series so it's a clean grab.
logger.fdebug("Gathering the status of all issues for the series.")
issues = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
if not issues:
#if issues are None it's probably a bad refresh/maxed out API that resulted in the issue data
#getting wiped out and not refreshed. Setting whack=True will force a complete refresh.
logger.info('No issue data available. This is Whack.')
whack = True
else:
#check for series that are numerically out of whack (ie. 5/4)
logger.info('Checking how out of whack the series is.')
whack = helpers.havetotals(refreshit=ComicID)
annload = [] #initiate the list here so we don't error out below.
@ -500,86 +522,90 @@ class WebInterface(object):
myDB.action('DELETE FROM issues WHERE ComicID=?', [ComicID])
myDB.action('DELETE FROM annuals WHERE ComicID=?', [ComicID])
logger.fdebug("Refreshing the series and pulling in new data using only CV.")
mylar.importer.addComictoDB(ComicID,mismatch,calledfrom='dbupdate',annload=annload)
#reload the annuals here.
if whack == False:
mylar.importer.addComictoDB(ComicID,mismatch,calledfrom='dbupdate',annload=annload)
#reload the annuals here.
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
annuals = []
ann_list = []
if mylar.ANNUALS_ON:
annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
ann_list += annuals_list
issues_new += annuals_list
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
annuals = []
ann_list = []
if mylar.ANNUALS_ON:
annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
ann_list += annuals_list
issues_new += annuals_list
logger.fdebug("Attempting to put the Status' back how they were.")
icount = 0
#the problem - the loop below will not match on NEW issues that have been refreshed that weren't present in the
#db before (ie. you left Mylar off for abit, and when you started it up it pulled down new issue information)
#need to test if issuenew['Status'] is None, but in a seperate loop below.
fndissue = []
for issue in issues:
for issuenew in issues_new:
#logger.fdebug(str(issue['Issue_Number']) + ' - issuenew:' + str(issuenew['IssueID']) + ' : ' + str(issuenew['Status']))
#logger.fdebug(str(issue['Issue_Number']) + ' - issue:' + str(issue['IssueID']) + ' : ' + str(issue['Status']))
if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
ctrlVAL = {"IssueID": issue['IssueID']}
#if the status is None and the original status is either Downloaded / Archived, keep status & stats
if issuenew['Status'] == None and (issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived'):
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize'],
"Status": issue['Status']}
#if the status is now Downloaded/Snatched, keep status & stats (downloaded only)
elif issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
if issuenew['Status'] == 'Downloaded':
newVAL['Status'] = issuenew['Status']
else:
newVAL['Status'] = issue['Status']
logger.fdebug("Attempting to put the Status' back how they were.")
icount = 0
#the problem - the loop below will not match on NEW issues that have been refreshed that weren't present in the
#db before (ie. you left Mylar off for a bit, and when you started it up it pulled down new issue information)
#need to test if issuenew['Status'] is None, but in a separate loop below.
fndissue = []
for issue in issues:
for issuenew in issues_new:
#logger.fdebug(str(issue['Issue_Number']) + ' - issuenew:' + str(issuenew['IssueID']) + ' : ' + str(issuenew['Status']))
#logger.fdebug(str(issue['Issue_Number']) + ' - issue:' + str(issue['IssueID']) + ' : ' + str(issue['Status']))
if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
ctrlVAL = {"IssueID": issue['IssueID']}
#if the status is None and the original status is either Downloaded / Archived, keep status & stats
if issuenew['Status'] == None and (issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived'):
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize'],
"Status": issue['Status']}
#if the status is now Downloaded/Snatched, keep status & stats (downloaded only)
elif issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
newVAL = {"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
if issuenew['Status'] == 'Downloaded':
newVAL['Status'] = issuenew['Status']
else:
newVAL['Status'] = issue['Status']
elif issue['Status'] == 'Archived':
newVAL = {"Status": issue['Status'],
"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
else:
#change the status to the previous status
newVAL = {"Status": issue['Status']}
elif issue['Status'] == 'Archived':
newVAL = {"Status": issue['Status'],
"Location": issue['Location'],
"ComicSize": issue['ComicSize']}
else:
#change the status to the previous status
newVAL = {"Status": issue['Status']}
if newVAL['Status'] == None:
newVAL = {"Status": "Skipped"}
if newVAL['Status'] == None:
newVAL = {"Status": "Skipped"}
if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):
logger.fdebug("annual detected for " + str(issue['IssueID']) + " #: " + str(issue['Issue_Number']))
myDB.upsert("Annuals", newVAL, ctrlVAL)
else:
#logger.fdebug('#' + str(issue['Issue_Number']) + ' writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
fndissue.append({"IssueID": issue['IssueID']})
icount+=1
break
logger.info("In the process of converting the data to CV, I changed the status of " + str(icount) + " issues.")
if any(d['IssueID'] == str(issue['IssueID']) for d in ann_list):
logger.fdebug("annual detected for " + str(issue['IssueID']) + " #: " + str(issue['Issue_Number']))
myDB.upsert("Annuals", newVAL, ctrlVAL)
else:
logger.fdebug('#' + str(issue['Issue_Number']) + ' writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
fndissue.append({"IssueID": issue['IssueID']})
icount+=1
break
logger.info("In the process of converting the data to CV, I changed the status of " + str(icount) + " issues.")
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=? AND Status is NULL', [ComicID])
if mylar.ANNUALS_ON:
issues_new += myDB.select('SELECT * FROM annuals WHERE ComicID=? AND Status is NULL', [ComicID])
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=? AND Status is NULL', [ComicID])
if mylar.ANNUALS_ON:
issues_new += myDB.select('SELECT * FROM annuals WHERE ComicID=? AND Status is NULL', [ComicID])
newiss = []
if mylar.AUTOWANT_UPCOMING:
#only mark store date >= current date as Wanted.
newstatus = "Wanted"
else:
newstatus = "Skipped"
for iss in issues_new:
newiss.append({"IssueID": iss['IssueID'],
"Status": newstatus})
if len(newiss) > 0:
for newi in newiss:
ctrlVAL = {"IssueID": newi['IssueID']}
newVAL = {"Status": newi['Status']}
#logger.info('writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
newiss = []
if mylar.AUTOWANT_UPCOMING:
newstatus = "Wanted"
logger.info('I have added ' + str(len(newiss)) + ' new issues for this series that were not present before.')
else:
newstatus = "Skipped"
for iss in issues_new:
newiss.append({"IssueID": iss['IssueID'],
"Status": newstatus})
if len(newiss) > 0:
for newi in newiss:
ctrlVAL = {"IssueID": newi['IssueID']}
newVAL = {"Status": newi['Status']}
logger.info('writing issuedata: ' + str(newVAL))
myDB.upsert("Issues", newVAL, ctrlVAL)
logger.info('I have added ' + str(len(newiss)) + ' new issues for this series that were not present before.')
mylar.importer.addComictoDB(ComicID,mismatch,annload=annload)
else:
mylar.importer.addComictoDB(ComicID,mismatch)
@ -682,9 +708,13 @@ class WebInterface(object):
raise cherrypy.HTTPRedirect("home")
addArtists.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None):
#logger.fdebug('ComicID:' + str(ComicID))
#logger.fdebug('mode:' + str(mode))
def queueit(self, **kwargs):
threading.Thread(target=self.queueissue, kwargs=kwargs).start()
queueit.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None):
logger.fdebug('ComicID:' + str(ComicID))
logger.fdebug('mode:' + str(mode))
now = datetime.datetime.now()
myDB = db.DBConnection()
#mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
@ -730,17 +760,23 @@ class WebInterface(object):
logger.info(u"Downloaded " + ComicName + " " + ComicIssue )
raise cherrypy.HTTPRedirect("pullist")
#return
elif mode == 'want' or mode == 'want_ann':
elif mode == 'want' or mode == 'want_ann' or manualsearch:
cdname = myDB.selectone("SELECT ComicName from comics where ComicID=?", [ComicID]).fetchone()
ComicName = cdname['ComicName']
controlValueDict = {"IssueID": IssueID}
newStatus = {"Status": "Wanted"}
if mode == 'want':
logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...")
myDB.upsert("issues", newStatus, controlValueDict)
if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' issue: ' + ComicIssue)
else:
logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...")
myDB.upsert("issues", newStatus, controlValueDict)
else:
logger.info(u"Marking " + ComicName + " Annual: " + ComicIssue + " as wanted...")
myDB.upsert("annuals", newStatus, controlValueDict)
if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' Annual: ' + ComicIssue)
else:
logger.info(u"Marking " + ComicName + " Annual: " + ComicIssue + " as wanted...")
myDB.upsert("annuals", newStatus, controlValueDict)
#---
#this should be on it's own somewhere
#if IssueID is not None:
@ -2399,6 +2435,8 @@ class WebInterface(object):
"post_processing" : helpers.checked(mylar.POST_PROCESSING),
"enable_meta" : helpers.checked(mylar.ENABLE_META),
"cmtagger_path" : mylar.CMTAGGER_PATH,
"ct_tag_cr" : helpers.checked(mylar.CT_TAG_CR),
"ct_tag_cbl" : helpers.checked(mylar.CT_TAG_CBL),
"branch" : version.MYLAR_VERSION,
"br_type" : mylar.INSTALL_TYPE,
"br_version" : mylar.versioncheck.getVersion(),
@ -2580,7 +2618,7 @@ class WebInterface(object):
nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_uid=None, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0,
enable_meta=0, cmtagger_path=None, enable_rss=0, rss_checkinterval=None, enable_torrent_search=0, enable_kat=0, enable_cbt=0, cbt_passkey=None, snatchedtorrent_notify=0,
enable_meta=0, cmtagger_path=None, ct_tag_cr=0, ct_tag_cbl=0, enable_rss=0, rss_checkinterval=None, enable_torrent_search=0, enable_kat=0, enable_cbt=0, cbt_passkey=None, snatchedtorrent_notify=0,
enable_torrents=0, minseeds=0, torrent_local=0, local_watchdir=None, torrent_seedbox=0, seedbox_watchdir=None, seedbox_user=None, seedbox_pass=None, seedbox_host=None, seedbox_port=None,
prowl_enabled=0, prowl_onsnatch=0, prowl_keys=None, prowl_priority=None, nma_enabled=0, nma_apikey=None, nma_priority=0, nma_onsnatch=0, pushover_enabled=0, pushover_onsnatch=0, pushover_apikey=None, pushover_userkey=None, pushover_priority=None, boxcar_enabled=0, boxcar_onsnatch=0, boxcar_token=None,
pushbullet_enabled=0, pushbullet_apikey=None, pushbullet_deviceid=None, pushbullet_onsnatch=0,
@ -2706,6 +2744,8 @@ class WebInterface(object):
mylar.PRE_SCRIPTS = pre_scripts
mylar.ENABLE_META = enable_meta
mylar.CMTAGGER_PATH = cmtagger_path
mylar.CT_TAG_CR = ct_tag_cr
mylar.CT_TAG_CBL = ct_tag_cbl
mylar.LOG_DIR = log_dir
mylar.LOG_LEVEL = log_level
mylar.CHMOD_DIR = chmod_dir

View File

@ -22,7 +22,7 @@ class AuthURLOpener(urllib.FancyURLopener):
return urllib.FancyURLopener.open(self, url)
def processEpisode(dirName, nzbName=None):
def processIssue(dirName, nzbName=None):
config = ConfigParser.ConfigParser()
configFilename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessComics.cfg")
@ -81,7 +81,7 @@ def processEpisode(dirName, nzbName=None):
for line in result:
print line
if any("Post Processing SUCCESSFULL" in s for s in result):
if any("Post Processing SUCCESSFUL" in s for s in result):
return 0
else:
return 1

View File

@ -26,11 +26,11 @@ if os.environ.has_key('NZBOP_SCRIPTDIR') and not os.environ['NZBOP_VERSION'][0:5
POSTPROCESS_NONE=95
#Start script
result = autoProcessComics.processEpisode(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'])
result = autoProcessComics.processIssue(os.environ['NZBPP_DIRECTORY'], os.environ['NZBPP_NZBNAME'])
elif len(sys.argv) == NZBGET_NO_OF_ARGUMENTS:
result = autoProcessComics.processEpisode(sys.argv[1], sys.argv[2], sys.argv[3])
result = autoProcessComics.processIssue(sys.argv[1], sys.argv[2], sys.argv[3])
if result == 0:
if os.environ.has_key('NZBOP_SCRIPTDIR'): # log success for nzbget

View File

@ -17,6 +17,6 @@ if len(sys.argv) < 2:
print "No folder supplied - is this being called from SABnzbd or NZBGet?"
sys.exit()
elif len(sys.argv) >= 3:
sys.exit(autoProcessComics.processEpisode(sys.argv[1], sys.argv[3]))
sys.exit(autoProcessComics.processIssue(sys.argv[1], sys.argv[3]))
else:
sys.exit(autoProcessComics.processEpisode(sys.argv[1]))
sys.exit(autoProcessComics.processIssue(sys.argv[1]))