mirror of https://github.com/evilhero/mylar
IMP: Redid configuration completely - added config module and changed structures accordingly. Allow for minimal ini configuration (minimal_ini=True). Updated global/config variables throughout each module as required. Provider order sequence now generated on each save (no longer requires a restart). Bunch of other fixes that were either part of the configuration changeover, or the result of and had to be changed/fixed in order for the this to work
This commit is contained in:
parent
8673896d2f
commit
c95b3d847c
48
Mylar.py
48
Mylar.py
|
@ -86,8 +86,7 @@ def main():
|
|||
mylar.QUIET = True
|
||||
|
||||
# Do an intial setup of the logger.
|
||||
logger.initLogger(console=not mylar.QUIET, log_dir=False,
|
||||
verbose=mylar.VERBOSE)
|
||||
logger.initLogger(console=not mylar.QUIET, log_dir=False, init=True, verbose=mylar.VERBOSE)
|
||||
|
||||
#if args.update:
|
||||
# print('Attempting to update Mylar so things can work again...')
|
||||
|
@ -175,7 +174,7 @@ def main():
|
|||
back = os.path.join(backupdir, 'mylar.db')
|
||||
back_1 = os.path.join(backupdir, 'mylar.db.1')
|
||||
else:
|
||||
ogfile = mylar.CONFIG_FILE
|
||||
ogfile = config_file
|
||||
back = os.path.join(backupdir, 'config.ini')
|
||||
back_1 = os.path.join(backupdir, 'config.ini.1')
|
||||
|
||||
|
@ -196,46 +195,51 @@ def main():
|
|||
|
||||
i += 1
|
||||
|
||||
from configobj import ConfigObj
|
||||
mylar.CFG = ConfigObj(mylar.CONFIG_FILE, encoding='utf-8')
|
||||
#from configobj import ConfigObj
|
||||
#mylar.CFG = ConfigObj(mylar.CONFIG_FILE, encoding='utf-8')
|
||||
|
||||
# Read config and start logging
|
||||
try:
|
||||
logger.info('Initializing startup sequence....')
|
||||
mylar.initialize(mylar.CONFIG_FILE)
|
||||
except Exception as e:
|
||||
print e
|
||||
raise SystemExit('FATAL ERROR')
|
||||
|
||||
# Rename the main thread
|
||||
threading.currentThread().name = "MAIN"
|
||||
|
||||
# Read config & start logging
|
||||
mylar.initialize()
|
||||
|
||||
if mylar.DAEMON:
|
||||
mylar.daemonize()
|
||||
|
||||
# Force the http port if neccessary
|
||||
if args.port:
|
||||
http_port = args.port
|
||||
logger.info('Starting Mylar on foced port: %i' % http_port)
|
||||
logger.info('Starting Mylar on forced port: %i' % http_port)
|
||||
else:
|
||||
http_port = int(mylar.HTTP_PORT)
|
||||
http_port = int(mylar.CONFIG.HTTP_PORT)
|
||||
|
||||
# Check if pyOpenSSL is installed. It is required for certificate generation
|
||||
# and for CherryPy.
|
||||
if mylar.ENABLE_HTTPS:
|
||||
if mylar.CONFIG.ENABLE_HTTPS:
|
||||
try:
|
||||
import OpenSSL
|
||||
except ImportError:
|
||||
logger.warn("The pyOpenSSL module is missing. Install this " \
|
||||
"module to enable HTTPS. HTTPS will be disabled.")
|
||||
mylar.ENABLE_HTTPS = False
|
||||
mylar.CONFIG.ENABLE_HTTPS = False
|
||||
|
||||
# Try to start the server. Will exit here is address is already in use.
|
||||
web_config = {
|
||||
'http_port': http_port,
|
||||
'http_host': mylar.HTTP_HOST,
|
||||
'http_root': mylar.HTTP_ROOT,
|
||||
'enable_https': mylar.ENABLE_HTTPS,
|
||||
'https_cert': mylar.HTTPS_CERT,
|
||||
'https_key': mylar.HTTPS_KEY,
|
||||
'https_chain': mylar.HTTPS_CHAIN,
|
||||
'http_username': mylar.HTTP_USERNAME,
|
||||
'http_password': mylar.HTTP_PASSWORD,
|
||||
'http_host': mylar.CONFIG.HTTP_HOST,
|
||||
'http_root': mylar.CONFIG.HTTP_ROOT,
|
||||
'enable_https': mylar.CONFIG.ENABLE_HTTPS,
|
||||
'https_cert': mylar.CONFIG.HTTPS_CERT,
|
||||
'https_key': mylar.CONFIG.HTTPS_KEY,
|
||||
'https_chain': mylar.CONFIG.HTTPS_CHAIN,
|
||||
'http_username': mylar.CONFIG.HTTP_USERNAME,
|
||||
'http_password': mylar.CONFIG.HTTP_PASSWORD,
|
||||
}
|
||||
|
||||
# Try to start the server.
|
||||
|
@ -243,8 +247,8 @@ def main():
|
|||
|
||||
#logger.info('Starting Mylar on port: %i' % http_port)
|
||||
|
||||
if mylar.LAUNCH_BROWSER and not args.nolaunch:
|
||||
mylar.launch_browser(mylar.HTTP_HOST, http_port, mylar.HTTP_ROOT)
|
||||
if mylar.CONFIG.LAUNCH_BROWSER and not args.nolaunch:
|
||||
mylar.launch_browser(mylar.CONFIG.HTTP_HOST, http_port, mylar.CONFIG.HTTP_ROOT)
|
||||
|
||||
# Start the background threads
|
||||
mylar.start()
|
||||
|
|
|
@ -104,7 +104,7 @@
|
|||
</div>
|
||||
<div id="version">
|
||||
Version: <em>${mylar.CURRENT_VERSION}</em>
|
||||
%if mylar.GIT_BRANCH != 'master':
|
||||
%if mylar.CONFIG.GIT_BRANCH != 'master':
|
||||
(${version.MYLAR_VERSION})
|
||||
%endif
|
||||
</div>
|
||||
|
|
|
@ -21,11 +21,11 @@
|
|||
</form>
|
||||
</div>
|
||||
|
||||
%if mylar.RENAME_FILES:
|
||||
%if mylar.CONFIG.RENAME_FILES:
|
||||
<a id="menu_link_refresh" onclick="doAjaxCall('manualRename?comicid=${comic['ComicID']}', $(this),'table')" data-success="Renaming files.">Rename Files</a>
|
||||
%endif
|
||||
<a id="menu_link_refresh" onclick="doAjaxCall('forceRescan?ComicID=${comic['ComicID']}', $(this),true);return true;" data-success="${comic['ComicName']} is being rescanned">Recheck Files</a>
|
||||
%if mylar.ENABLE_META:
|
||||
%if mylar.CONFIG.ENABLE_META:
|
||||
<a id="menu_link_refresh" onclick="doAjaxCall('group_metatag?ComicID=${comic['ComicID']}&dirName=${comic['ComicLocation'] |u}', $(this),'table')" data-success="(re)tagging every issue present for '${comic['ComicName']}'">Manual MetaTagging</a>
|
||||
%endif
|
||||
%if comic['Status'] == 'Paused':
|
||||
|
@ -163,10 +163,10 @@
|
|||
<label><big>Issues in Series: </big><norm>${comic['Total']} issues</norm></label>
|
||||
</div>
|
||||
<div>
|
||||
%if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
%if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None':
|
||||
<%
|
||||
try:
|
||||
archive_path = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
archive_path = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
except:
|
||||
archive_path = 'None'
|
||||
%>
|
||||
|
@ -261,7 +261,7 @@
|
|||
<input type="radio" name="fuzzy_year" value="0" ${comicConfig['fuzzy_year0']} /> Default <input type="radio" name="fuzzy_year" value="1" ${comicConfig['fuzzy_year1']} /> Year Removal <input type="radio" name="fuzzy_year" value="2" ${comicConfig['fuzzy_year2']} /> Fuzzy the Year
|
||||
</div>
|
||||
|
||||
%if mylar.ENABLE_32P and mylar.MODE_32P == 1:
|
||||
%if mylar.CONFIG.ENABLE_32P and mylar.CONFIG.MODE_32P == 1:
|
||||
<div class="row">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="allow_packs" value="1" ${comicConfig['allow_packs']} /><label>Enable Pack Downloads<a href="#" title="Will allow downloading of multiple issues in one file (packs), but will search individual issues first"><img src="interfaces/default/images/info32.png" height="16" alt="" /></a></label>
|
||||
</div>
|
||||
|
@ -284,7 +284,7 @@
|
|||
<label for="Skipped" class="checkbox inline Skipped"><input type="checkbox" id="Skipped" checked="checked" /> Skipped: <b>${isCounts['Skipped']}</b></label>
|
||||
<label for="Ignored" class="checkbox inline Ignored"><input type="checkbox" id="Ignored" checked="checked" /> Ignored: <b>${isCounts['Ignored']}</b></label>
|
||||
<label for="Snatched" class="checkbox inline Snatched"><input type="checkbox" id="Snatched" checked="checked" /> Snatched: <b>${isCounts['Snatched']}</b></label>
|
||||
%if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
%if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
<label for="Failed" class="checkbox inline Failed"><input type="checkbox" id="Failed" checked="checked" /> Failed: <b>${isCounts['Failed']}</b></label>
|
||||
%endif
|
||||
</div>
|
||||
|
@ -305,7 +305,7 @@
|
|||
<option value="Skipped">Skipped</option>
|
||||
<option value="Archived">Archived</option>
|
||||
<option value="Ignored">Ignored</option>
|
||||
%if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
%if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
<option value="Failed">Failed</option>
|
||||
%endif
|
||||
</select>
|
||||
|
@ -377,7 +377,7 @@
|
|||
%elif (issue['Status'] == 'Snatched'):
|
||||
<a href="#" onclick="doAjaxCall('retryit?ComicName=${issue['ComicName'] |u}&ComicID=${issue['ComicID']}&IssueID=${issue['IssueID']}&IssueNumber=${issue['Issue_Number']}&ComicYear=${issue['IssueDate']}', $(this),'table')" data-success="Retrying the same version of '${issue['ComicName']}' '${issue['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" class="highqual" /></a>
|
||||
<a href="#" title="Mark issue as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${issue['IssueID']}&ComicID=${issue['ComicID']}',$(this),'table')" data-success="'${issue['Issue_Number']}' has been marked as Skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" class="highqual" /></a>
|
||||
%if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
%if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
<a href="#" title="Mark issue as Failed" onclick="doAjaxCall('unqueueissue?IssueID=${issue['IssueID']}&ComicID=${issue['ComicID']}&mode=failed',$(this),'table')" data-success="'${issue['Issue_Number']}' has been marked as Failed"><img src="interfaces/default/images/failed.png" height="25" width="25" class="highqual" /></a>
|
||||
%endif
|
||||
%elif (issue['Status'] == 'Downloaded'):
|
||||
|
@ -385,8 +385,8 @@
|
|||
if issue['Location'] is not None:
|
||||
linky = os.path.join(comic['ComicLocation'],issue['Location'])
|
||||
if not os.path.isfile(linky):
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
linky = os.path.join(pathdir,issue['Location'])
|
||||
else:
|
||||
linky = None
|
||||
|
@ -407,7 +407,7 @@
|
|||
</div>
|
||||
%endif
|
||||
|
||||
%if mylar.ENABLE_META:
|
||||
%if mylar.CONFIG.ENABLE_META:
|
||||
<a href="#" title="Manually meta-tag issue" onclick="doAjaxCall('manual_metatag?dirName=${comic['ComicLocation'] |u}&issueid=${issue['IssueID']}&filename=${linky |u}&comicid=${issue['ComicID']}&comversion=${comic['ComicVersion']}&seriesyear=${comic['ComicYear']}',$(this),'table')" data-success="${comic['ComicName']} #${issue['Issue_Number']} successfully tagged."><img src="interfaces/default/images/comictagger.png" height="25" width="25" class="highqual" /></a>
|
||||
%endif
|
||||
%endif
|
||||
|
@ -514,7 +514,7 @@
|
|||
%elif (annual['Status'] == 'Snatched'):
|
||||
<a href="#" onclick="doAjaxCall('retryit?ComicName=${annual['ComicName'] |u}&ComicID=${annual['ComicID']}&IssueID=${annual['IssueID']}&IssueNumber=${annual['Issue_Number']}&ComicYear=${annual['IssueDate']}&ReleaseComicID=${annual['ReleaseComicID']}', $(this),'table')" data-success="Retrying the same version of '${annual['ComicName']}' '${annual['Issue_Number']}'" title="Retry the same download again"><img src="interfaces/default/images/retry_icon.png" height="25" width="25" class="highqual" /></a>
|
||||
<a href="#" title="Mark annual as Skipped" onclick="doAjaxCall('unqueueissue?IssueID=${annual['IssueID']}&ComicID=${annual['ComicID']}&ReleaseComicID=${annual['ReleaseComicID']}',$(this),'table')" data-success="'${annual['Issue_Number']}' has been marked as skipped"><img src="interfaces/default/images/skipped_icon.png" height="25" width="25" class="highqual" /></a>
|
||||
%if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
%if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
<a href="#" title="Mark annual as Failed" onclick="doAjaxCall('unqueueissue?IssueID=${annual['IssueID']}&ComicID=${annual['ComicID']}&ReleaseComicID=${annual['ReleaseComicID']}&mode=failed',$(this),'table')" data-success="'${annual['Issue_Number']}' has been marked as Failed"><img src="interfaces/default/images/failed.png" height="25" width="25" class="highqual" /></a>
|
||||
%endif
|
||||
%elif (annual['Status'] == 'Downloaded'):
|
||||
|
@ -522,8 +522,8 @@
|
|||
if annual['Location'] is not None:
|
||||
linky = os.path.join(comic['ComicLocation'],annual['Location'])
|
||||
if not os.path.isfile(linky):
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comic['ComicLocation']))
|
||||
linky = os.path.join(pathdir,issue['Location'])
|
||||
else:
|
||||
linky = None
|
||||
|
@ -544,7 +544,7 @@
|
|||
</div>
|
||||
%endif
|
||||
|
||||
%if mylar.ENABLE_META:
|
||||
%if mylar.CONFIG.ENABLE_META:
|
||||
<a href="#" title="Manually meta-tag annual" onclick="doAjaxCall('manual_metatag?dirName=${comic['ComicLocation'] |u}&issueid=${annual['IssueID']}&filename=${linky |u}&comicid=${annual['ComicID']}&comversion=${comic['ComicVersion']}',$(this),'table')" data-success="${annual['ComicName']} #${issue['Issue_Number']} successfully tagged."><img src="interfaces/default/images/comictagger.png" height="25" width="25" class="highqual" /></a>
|
||||
%endif
|
||||
%endif
|
||||
|
@ -569,7 +569,7 @@
|
|||
</table>
|
||||
</form>
|
||||
%endif
|
||||
%if mylar.ANNUALS_ON:
|
||||
%if mylar.CONFIG.ANNUALS_ON:
|
||||
<div style="position:relative; width:960px; height:10px; margin:10px auto;">
|
||||
<form action="manual_annual_add" method="GET">
|
||||
<input type="hidden" name="comicid" value=${comic['ComicID']}>
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
<%inherit file="base.html"/>
|
||||
<%!
|
||||
import mylar
|
||||
from operator import itemgetter
|
||||
%>
|
||||
|
||||
<%def name="headerIncludes()">
|
||||
|
@ -189,23 +190,25 @@
|
|||
<small>specify your own CV API key here </small>
|
||||
</div>
|
||||
|
||||
<div class="row checkbox">
|
||||
<input id="api_enabled" type="checkbox" onclick="initConfigCheckbox($(this));" name="api_enabled" value="1" ${config['api_enabled']} /><label>Enable API</label>
|
||||
</div>
|
||||
<div class="apioptions">
|
||||
<div class="row">
|
||||
<label>Mylar API key</label>
|
||||
<input type="text" name="api_key" id="api_key" value="${config['api_key']}" size="20">
|
||||
<input type="button" value="Generate" id="generate_api">
|
||||
<small>Current API key: <strong>${config['api_key']}</strong></small>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="row checkbox">
|
||||
<input id="api_enabled" type="checkbox" name="api_enabled" value="1" ${config['api_enabled']} /><label>Enable API</label>
|
||||
</div>
|
||||
<div id="apioptions">
|
||||
<div class="row">
|
||||
<label>Mylar API key</label>
|
||||
<input type="text" name="api_key" id="api_key" value="${config['api_key']}" size="20">
|
||||
<input type="button" value="Generate" id="generate_api">
|
||||
<small>Current API key: <strong>${config['api_key']}</strong></small>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<legend>Interval</legend>
|
||||
<div class="row">
|
||||
<label>NZB Search Interval</label>
|
||||
<input type="text" name="nzb_search_interval" value="${config['nzb_search_interval']}" size="4">mins
|
||||
<input type="text" name="search_interval" value="${config['search_interval']}" size="4">mins
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" name="nzb_startup_search" value="1" ${config['nzb_startup_search']} /><label>NZB Search on startup</label>
|
||||
</div>
|
||||
|
@ -594,7 +597,6 @@
|
|||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
</div>
|
||||
<div id="tabs-4">
|
||||
|
@ -614,7 +616,7 @@
|
|||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input id="usenzbsu" type="checkbox" onclick="initConfigCheckbox($(this));" name="nzbsu" value="1" ${config['use_nzbsu']} /><legend>NZB.SU</legend>
|
||||
<input id="usenzbsu" type="checkbox" onclick="initConfigCheckbox($(this));" name="nzbsu" value="1" ${config['nzbsu']} /><legend>NZB.SU</legend>
|
||||
</div>
|
||||
<div class="config">
|
||||
<div class="row checkbox">
|
||||
|
@ -634,7 +636,7 @@
|
|||
|
||||
<fieldset>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input id="usedognzb" type="checkbox" onclick="initConfigCheckbox($(this));" name="dognzb" value="1" ${config['use_dognzb']} /><legend>DOGNZB</legend>
|
||||
<input id="usedognzb" type="checkbox" onclick="initConfigCheckbox($(this));" name="dognzb" value="1" ${config['dognzb']} /><legend>DOGNZB</legend>
|
||||
</div>
|
||||
<div class="config">
|
||||
<div class="row checkbox">
|
||||
|
@ -649,7 +651,7 @@
|
|||
|
||||
<fieldset>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input id="useexperimental" type="checkbox" onclick="initConfigCheckbox($(this));" name="experimental" value="1" ${config['use_experimental']} /><legend>Use Experimental Search</legend>
|
||||
<input id="useexperimental" type="checkbox" onclick="initConfigCheckbox($(this));" name="experimental" value="1" ${config['experimental']} /><legend>Use Experimental Search</legend>
|
||||
</div>
|
||||
<div>
|
||||
<small class="heading"><span style="float: left; margin-right: .3em; margin-top: 4px;" class="ui-icon ui-icon-info"></span>Note: this is an experimental search - results may be better/worse.</small>
|
||||
|
@ -750,7 +752,7 @@
|
|||
<fieldset>
|
||||
<legend>Newznab</legend>
|
||||
<div class="row checkbox">
|
||||
<input id="usenewznab" type="checkbox" name="newznab" onclick="initConfigCheckbox($(this));" value="1" ${config['use_newznab']} /><label>Use Newznab</label>
|
||||
<input id="usenewznab" type="checkbox" name="newznab" onclick="initConfigCheckbox($(this));" value="1" ${config['newznab']} /><label>Use Newznab</label>
|
||||
</div>
|
||||
<div id="newznab_providers">
|
||||
<%
|
||||
|
@ -804,31 +806,19 @@
|
|||
</div>
|
||||
</fieldset>
|
||||
|
||||
<!--
|
||||
<fieldset>
|
||||
<div id="newznab providers">
|
||||
<%
|
||||
order_number = 1
|
||||
%>
|
||||
|
||||
%for p_order in config['provider_order']:
|
||||
<div class="config" id="p_order${order_number}">
|
||||
<div class="row">
|
||||
<label>${p_order[0]}</label>
|
||||
<label>${p_order[1]}</label>
|
||||
</div>
|
||||
<div>
|
||||
<%
|
||||
order_number += 1
|
||||
%>
|
||||
|
||||
%endfor
|
||||
</div>
|
||||
</fieldset>
|
||||
-->
|
||||
</td>
|
||||
|
||||
</tr>
|
||||
<tr>
|
||||
<%
|
||||
porder = []
|
||||
for k,v in sorted(mylar.CONFIG.PROVIDER_ORDER.iteritems(), key=itemgetter(0), reverse=False):
|
||||
porder.append(v)
|
||||
porder = ', '.join(porder)
|
||||
%>
|
||||
<legend><center>Search Order: ${porder}</center></legend>
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
</div>
|
||||
<div id="tabs-5">
|
||||
|
@ -886,7 +876,7 @@
|
|||
<fieldset>
|
||||
<legend>Duplicate Dump Folder</legend>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input type="checkbox" id="enable_ddump" onclick="initConfigCheckbox($this));" name="ddump" value="1" ${config['ddump']} /><label>Enable Duplicate Dump Folder</label>
|
||||
<input type="checkbox" id="ddump" onclick="initConfigCheckbox($this));" name="ddump" value="1" ${config['ddump']} /><label>Enable Duplicate Dump Folder</label>
|
||||
</div>
|
||||
<div class="config">
|
||||
<div class="row"">
|
||||
|
@ -1102,7 +1092,7 @@
|
|||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="autowant_all" value="1" ${config['autowant_all']} /><label>Automatically Mark All Issues as Wanted</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="comic_cover_local" value="1" ${config['comic_cover_local']} /><label>Place cover.jpg into Comic Directory for each comic</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="cvinfo" value="1" ${config['cvinfo']} /><label>Write cvinfo into each comic directory</label>
|
||||
%if mylar.ENABLE_TORRENTS:
|
||||
%if mylar.CONFIG.ENABLE_TORRENTS:
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="snatchedtorrent_notify" value="1" ${config['snatchedtorrent_notify']} /><label>Manual Post-Processing - Notify on Completed Torrents</label>
|
||||
%endif
|
||||
|
||||
|
@ -1115,7 +1105,7 @@
|
|||
<select name="interface"><h3>
|
||||
%for interface in config['interface_list']:
|
||||
<%
|
||||
if interface == mylar.INTERFACE:
|
||||
if interface == mylar.CONFIG.INTERFACE:
|
||||
selected = 'selected="selected"'
|
||||
else:
|
||||
selected = ''
|
||||
|
@ -1924,7 +1914,7 @@
|
|||
initConfigCheckbox("#replace_spaces");
|
||||
initConfigCheckbox("#use_minsize");
|
||||
initConfigCheckbox("#use_maxsize");
|
||||
initConfigCheckbox("#enable_ddump");
|
||||
initConfigCheckbox("#ddump");
|
||||
initConfigCheckbox("#enable_failed");
|
||||
initConfigCheckbox("#enable_meta");
|
||||
initConfigCheckbox("#zero_level");
|
||||
|
|
|
@ -30,20 +30,20 @@
|
|||
<img src="interfaces/default/images/ultron.png" style="float:right" height="125" width="125" />
|
||||
<fieldset>
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" name="autoadd" style="vertical-align: middle; margin: 3px; margin-top: -1px;" id="autoadd" value="1" ${checked(mylar.ADD_COMICS)}><label>Auto-add new series</label>
|
||||
<input type="checkbox" name="autoadd" style="vertical-align: middle; margin: 3px; margin-top: -1px;" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)}><label>Auto-add new series</label>
|
||||
</div>
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" id="imp_move" value="1" ${checked(mylar.IMP_MOVE)}><label>Move files</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" id="imp_move" value="1" ${checked(mylar.CONFIG.IMP_MOVE)}><label>Move files</label>
|
||||
</div>
|
||||
%if mylar.RENAME_FILES:
|
||||
%if mylar.CONFIG.RENAME_FILES:
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.IMP_RENAME)}><label>Rename Files </label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)}><label>Rename Files </label>
|
||||
<small>(After importing, Rename the files to configuration settings)</small>
|
||||
<label>${mylar.FOLDER_FORMAT}/${mylar.FILE_FORMAT}</label>
|
||||
<label>${mylar.CONFIG.FOLDER_FORMAT}/${mylar.CONFIG.FILE_FORMAT}</label>
|
||||
</div>
|
||||
%endif
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.IMP_METADATA)}><label>Use Existing Metadata</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)}><label>Use Existing Metadata</label>
|
||||
<small>(Use existing Metadata to better locate series for import)</small>
|
||||
</div>
|
||||
%if mylar.IMPORTLOCK:
|
||||
|
|
|
@ -74,26 +74,26 @@
|
|||
<br/>
|
||||
<div class="row">
|
||||
<label for="">Path to directory</label>
|
||||
%if mylar.COMIC_DIR:
|
||||
<input type="text" value="${mylar.COMIC_DIR}" name="path" size="70" />
|
||||
%if mylar.CONFIG.COMIC_DIR:
|
||||
<input type="text" value="${mylar.CONFIG.COMIC_DIR}" name="path" size="70" />
|
||||
%else:
|
||||
<input type="text" value="Enter a Comic Directory to scan" onfocus="if
|
||||
(this.value==this.defaultValue) this.value='';" name="path" size="70" />
|
||||
%endif
|
||||
</div>
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" name="autoadd" id="autoadd" value="1" ${checked(mylar.ADD_COMICS)}><label>Auto-add new series</label>
|
||||
<input type="checkbox" name="autoadd" id="autoadd" value="1" ${checked(mylar.CONFIG.ADD_COMICS)}><label>Auto-add new series</label>
|
||||
</div>
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.IMP_METADATA)}><label>Use existing Metadata</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_metadata" id="imp_metadata" value="1" ${checked(mylar.CONFIG.IMP_METADATA)}><label>Use existing Metadata</label>
|
||||
<small>Use existing Metadata to better locate series for import</small>
|
||||
</div>
|
||||
<div class="row checkbox">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" onclick="initConfigCheckbox($this));" id="imp_move" value="1" ${checked(mylar.IMP_MOVE)}><label>Move files into corresponding Series directory</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_move" onclick="initConfigCheckbox($this));" id="imp_move" value="1" ${checked(mylar.CONFIG.IMP_MOVE)}><label>Move files into corresponding Series directory</label>
|
||||
<small>Leaving this unchecked will not move anything, but will mark the issues as Archived</small>
|
||||
</div>
|
||||
<div class="config">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.IMP_RENAME)}><label>Rename Files </label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="imp_rename" id="imp_rename" value="1" ${checked(mylar.CONFIG.IMP_RENAME)}><label>Rename Files </label>
|
||||
<small>Rename files to configuration settings</small>
|
||||
</div>
|
||||
<br/>
|
||||
|
@ -123,8 +123,8 @@
|
|||
post-processing options.<br/> The remainder will not be moved from the given directory</strong></p>
|
||||
<div class="row">
|
||||
<label for="">Path to Manually Run</label>
|
||||
%if mylar.CHECK_FOLDER:
|
||||
<input type="text" value="${mylar.CHECK_FOLDER}" name="nzb_folder" size="70" />
|
||||
%if mylar.CONFIG.CHECK_FOLDER:
|
||||
<input type="text" value="${mylar.CONFIG.CHECK_FOLDER}" name="nzb_folder" size="70" />
|
||||
%else:
|
||||
<input type="text" value="Enter the full path to post-process" onfocus="if
|
||||
(this.value==this.defaultValue) this.value='';" name="nzb_folder" size="70" />
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<%def name="headerIncludes()">
|
||||
<div id="subhead_container">
|
||||
<div id="subhead_menu">
|
||||
%if mylar.TAB_ENABLE:
|
||||
%if mylar.CONFIG.TAB_ENABLE:
|
||||
<a id="menu_link_delete" href="#" onclick="doAjaxCall('syncfiles',$(this),'table')" data-success="Syncing complete.">Sync</a>
|
||||
%endif
|
||||
<a id="menu_link_delete" href="#" onclick="doAjaxCall('removefromreadlist?AllRead=1',$(this),'table')" data-success="All Read Records Removed">Remove Read</a>
|
||||
|
@ -69,7 +69,7 @@
|
|||
<fieldset>
|
||||
<legend>ReadList Options</legend>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="send2read" id="send2read" value="1" ${checked(mylar.SEND2READ)} /><label>Automatically send new pulls to Readlist (Added)</label></br>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="send2read" id="send2read" value="1" ${checked(mylar.CONFIG.SEND2READ)} /><label>Automatically send new pulls to Readlist (Added)</label></br>
|
||||
</div>
|
||||
</fieldset>
|
||||
</td>
|
||||
|
@ -81,27 +81,27 @@
|
|||
<small class="heading"><span style="float: left; margin-right: .3em; margin-top: 4px;" class="ui-icon ui-icon-info"></span>Requires SFTP Server running on tablet</small>
|
||||
</div>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input id="tabenable" type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" onclick="initConfigCheckbox($this);" name="tab_enable" id="tab_enable" value="1" ${checked(mylar.TAB_ENABLE)} /><label>Enable Tablet (Android)</label>
|
||||
<input id="tabenable" type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" onclick="initConfigCheckbox($this);" name="tab_enable" id="tab_enable" value="1" ${checked(mylar.CONFIG.TAB_ENABLE)} /><label>Enable Tablet (Android)</label>
|
||||
</div>
|
||||
<div class="config">
|
||||
<div class="row">
|
||||
<label>IP:PORT</label>
|
||||
<input type="text" placeholder="IP Address of tablet" name="tab_host" value="${mylar.TAB_HOST}" size="30">
|
||||
<input type="text" placeholder="IP Address of tablet" name="tab_host" value="${mylar.CONFIG.TAB_HOST}" size="30">
|
||||
</div>
|
||||
<div class="row">
|
||||
<label>Username</label>
|
||||
<input type="text" name="tab_user" value="${mylar.TAB_USER}" size="20">
|
||||
<input type="text" name="tab_user" value="${mylar.CONFIG.TAB_USER}" size="20">
|
||||
</div>
|
||||
<div class="row">
|
||||
<label>Password:</label>
|
||||
<input type="password" name="tab_pass" value="${mylar.TAB_PASS}" size="20">
|
||||
<input type="password" name="tab_pass" value="${mylar.CONFIG.TAB_PASS}" size="20">
|
||||
</div>
|
||||
<div class="row">
|
||||
<label>Download Location:</label>
|
||||
<input type="text" placeholder="Full path (or jailed path)" name="tab_directory" value="${mylar.TAB_DIRECTORY}" size="36" /></br>
|
||||
<input type="text" placeholder="Full path (or jailed path)" name="tab_directory" value="${mylar.CONFIG.TAB_DIRECTORY}" size="36" /></br>
|
||||
</div>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="maintainseriesfolder" id="maintainseriesfolder" value="1" ${checked(mylar.MAINTAINSERIESFOLDER)} /><label for="maintainseriesfolder">Maintain Comic Folder Structure when Syncing to Tablet</label></br>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="maintainseriesfolder" id="maintainseriesfolder" value="1" ${checked(mylar.CONFIG.MAINTAINSERIESFOLDER)} /><label for="maintainseriesfolder">Maintain Comic Folder Structure when Syncing to Tablet</label></br>
|
||||
</div>
|
||||
</div>
|
||||
</fieldset>
|
||||
|
|
|
@ -42,7 +42,7 @@
|
|||
<form action="CreateFolders" method="GET" id="CreatetheFolders">
|
||||
<fieldset>
|
||||
<div class="row">
|
||||
<input type="checkbox" name="createfolders" id="createfolders" value=1 ${checked(mylar.CREATE_FOLDERS)} /><label>Automatic Folder Creation</label>
|
||||
<input type="checkbox" name="createfolders" id="createfolders" value=1 ${checked(mylar.CONFIG.CREATE_FOLDERS)} /><label>Automatic Folder Creation</label>
|
||||
</div>
|
||||
<input type="submit" style="display:none" />
|
||||
</fieldset>
|
||||
|
|
|
@ -50,18 +50,18 @@
|
|||
<legend>Options</legend>
|
||||
<div class="row checkbox left clearfix">
|
||||
<%
|
||||
storyarcdest = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs')
|
||||
storyarcdest = os.path.join(mylar.CONFIG.DESTINATION_DIR, 'StoryArcs')
|
||||
%>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" onclick="getOption(this)" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.STORYARCDIR)} /><label>Arcs in StoryArc Directory </br><small>(${storyarcdest})</small></label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" onclick="getOption(this)" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.CONFIG.STORYARCDIR)} /><label>Arcs in StoryArc Directory </br><small>(${storyarcdest})</small></label>
|
||||
</div>
|
||||
<div id="arc_options">
|
||||
<div class="row">
|
||||
<label>Arc Folder Format</label>
|
||||
<input type="text" title="$publisher, $spanyears, $arc" name="arc_folderformat" value="${mylar.ARC_FOLDERFORMAT}" size="25">
|
||||
<input type="text" title="$publisher, $spanyears, $arc" name="arc_folderformat" value="${mylar.CONFIG.ARC_FOLDERFORMAT}" size="25">
|
||||
</div>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.READ2FILENAME)} /><label>Append Reading # to filename</label></br>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.CONFIG.READ2FILENAME)} /><label>Append Reading # to filename</label></br>
|
||||
</div>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="copy2arcdir" id="copy2arcdir" value="1" ${checked(mylar.COPY2ARCDIR)} />
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="copy2arcdir" id="copy2arcdir" value="1" ${checked(mylar.CONFIG.COPY2ARCDIR)} />
|
||||
<select name="arc_fileops" id="arc_fileops">
|
||||
<%
|
||||
if 'windows' in mylar.OS_DETECT.lower():
|
||||
|
@ -71,7 +71,7 @@
|
|||
%>
|
||||
%for x in optionlist:
|
||||
<%
|
||||
if mylar.ARC_FILEOPS == x:
|
||||
if mylar.CONFIG.ARC_FILEOPS == x:
|
||||
outputselect = 'selected'
|
||||
else:
|
||||
outputselect = ''
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<%def name="headerIncludes()">
|
||||
<div id="subhead_container">
|
||||
<div id="subhead_menu">
|
||||
%if mylar.TAB_ENABLE:
|
||||
%if mylar.CONFIG.TAB_ENABLE:
|
||||
<a id="menu_link_delete" onclick="doAjaxCall('syncfiles',$(this),'table')" data-success="Successfully sent issues to your device">Sync</a>
|
||||
%endif
|
||||
<a id="menu_link_delete" href="#">Remove Read</a>
|
||||
|
@ -39,22 +39,22 @@
|
|||
<form action="arcOptions" id="chkoptions" method="GET">
|
||||
<fieldset>
|
||||
<div class="row checkbox left clearfix">
|
||||
%if mylar.STORYARCDIR:
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.STORYARCDIR)} /><label>Should I create a Story-Arc Directory?</label><br/>
|
||||
%if mylar.CONFIG.STORYARCDIR:
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="storyarcdir" id="storyarcdir" value="1" ${checked(mylar.CONFIG.STORYARCDIR)} /><label>Should I create a Story-Arc Directory?</label><br/>
|
||||
<small>Arcs in StoryArc Directory: ${sdir}</small><br/>
|
||||
%endif
|
||||
<!--
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" /><label>Show Downloaded Story Arc Issues on ReadingList tab</label><br/>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" /><label>Enforce Renaming/MetaTagging options (if enabled)</label><br/>
|
||||
-->
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.READ2FILENAME)} /><label>Append Reading# to filename</label><br/>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="read2filename" id="read2filename" value="1" ${checked(mylar.CONFIG.READ2FILENAME)} /><label>Append Reading# to filename</label><br/>
|
||||
<%
|
||||
if mylar.STORYARCDIR:
|
||||
if mylar.CONFIG.STORYARCDIR:
|
||||
carcdir = 'StoryArc'
|
||||
else:
|
||||
carcdir = 'GrabBag'
|
||||
%>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="copy2arcdir" id="copy2arcdir" value="1" ${checked(mylar.COPY2ARCDIR)} /><label>Copy watchlisted issues to ${carcdir} Directory</label>
|
||||
<input type="checkbox" style="vertical-align: middle; margin: 3px; margin-top: -1px;" name="copy2arcdir" id="copy2arcdir" value="1" ${checked(mylar.CONFIG.COPY2ARCDIR)} /><label>Copy watchlisted issues to ${carcdir} Directory</label>
|
||||
<input type="hidden" name="StoryArcID" value="${storyarcid}">
|
||||
<input type="hidden" name="StoryArcName" value="${storyarcname}">
|
||||
|
||||
|
|
|
@ -21,12 +21,12 @@
|
|||
<div id="checkboxControls" style="float: right; vertical-align: middle; margin: 5px 3px 3px 3px;">
|
||||
<div style="padding-bottom: 5px;">
|
||||
<label for="Wanted" class="checkbox inline Wanted"><input type="checkbox" id="Wanted" checked="checked" /> Wanted: <b>${isCounts['Wanted']}</b></label>
|
||||
%if mylar.UPCOMING_SNATCHED:
|
||||
%if mylar.CONFIG.UPCOMING_SNATCHED:
|
||||
%if int(isCounts['Snatched']) > 0:
|
||||
<label for="Snatched" class="checkbox inline Snatched"><input type="checkbox" id="Snatched" checked="checked" /> Snatched: <b>${isCounts['Snatched']}</b></label>
|
||||
%endif
|
||||
%endif
|
||||
%if int(isCounts['Failed']) > 0 and mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
%if int(isCounts['Failed']) > 0 and mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
<label for="Failed" class="checkbox inline Failed"><input type="checkbox" id="Failed" checked="checked" /> Failed: <b>${isCounts['Failed']}</b></label>
|
||||
%endif
|
||||
</div>
|
||||
|
|
|
@ -43,7 +43,7 @@
|
|||
<fieldset>
|
||||
<div class="row checkbox left clearfix">
|
||||
</br>
|
||||
<input type="checkbox" name="weekfolder" id="weekfolder" value="1" ${checked(mylar.WEEKFOLDER)} /><label>Store in Weekly Directory (${weekfold})</label>
|
||||
<input type="checkbox" name="weekfolder" id="weekfolder" value="1" ${checked(mylar.CONFIG.WEEKFOLDER)} /><label>Store in Weekly Directory (${weekfold})</label>
|
||||
</div>
|
||||
|
||||
<input type="hidden" name="year" value="${weekinfo['year']}">
|
||||
|
@ -102,6 +102,13 @@
|
|||
%else:
|
||||
<a href="comicDetails?ComicID=${weekly['COMICID']}">${weekly['COMIC']}</a>
|
||||
%endif
|
||||
%if weekly['VOLUME'] is not None:
|
||||
 V${weekly['VOLUME']}
|
||||
%endif
|
||||
|
||||
%if weekly['SERIESYEAR'] is not None:
|
||||
 (${weekly['SERIESYEAR']})
|
||||
%endif
|
||||
</td>
|
||||
<td class="comicnumber">${weekly['ISSUE']}</td>
|
||||
%if weekly['AUTOWANT']:
|
||||
|
|
|
@ -208,7 +208,7 @@ class FailedProcessor(object):
|
|||
logger.info(module + ' Successfully marked as Failed.')
|
||||
self._log('Successfully marked as Failed.')
|
||||
|
||||
if mylar.FAILED_AUTO:
|
||||
if mylar.CONFIG.FAILED_AUTO:
|
||||
logger.info(module + ' Sending back to search to see if we can find something that will not fail.')
|
||||
self._log('Sending back to search to see if we can find something better that will not fail.')
|
||||
self.valreturn.append({"self.log": self.log,
|
||||
|
|
|
@ -62,7 +62,7 @@ class PostProcessor(object):
|
|||
if queue:
|
||||
self.queue = queue
|
||||
|
||||
if mylar.FILE_OPTS == 'copy':
|
||||
if mylar.CONFIG.FILE_OPTS == 'copy':
|
||||
self.fileop = shutil.copy
|
||||
else:
|
||||
self.fileop = shutil.move
|
||||
|
@ -90,13 +90,13 @@ class PostProcessor(object):
|
|||
"""
|
||||
logger.fdebug("initiating pre script detection.")
|
||||
self._log("initiating pre script detection.")
|
||||
logger.fdebug("mylar.PRE_SCRIPTS : " + mylar.PRE_SCRIPTS)
|
||||
self._log("mylar.PRE_SCRIPTS : " + mylar.PRE_SCRIPTS)
|
||||
# for currentScriptName in mylar.PRE_SCRIPTS:
|
||||
with open(mylar.PRE_SCRIPTS, 'r') as f:
|
||||
logger.fdebug("mylar.PRE_SCRIPTS : " + mylar.CONFIG.PRE_SCRIPTS)
|
||||
self._log("mylar.PRE_SCRIPTS : " + mylar.CONFIG.PRE_SCRIPTS)
|
||||
# for currentScriptName in mylar.CONFIG.PRE_SCRIPTS:
|
||||
with open(mylar.CONFIG.PRE_SCRIPTS, 'r') as f:
|
||||
first_line = f.readline()
|
||||
|
||||
if mylar.PRE_SCRIPTS.endswith('.sh'):
|
||||
if mylar.CONFIG.PRE_SCRIPTS.endswith('.sh'):
|
||||
shell_cmd = re.sub('#!', '', first_line).strip()
|
||||
if shell_cmd == '' or shell_cmd is None:
|
||||
shell_cmd = '/bin/bash'
|
||||
|
@ -104,7 +104,7 @@ class PostProcessor(object):
|
|||
#forces mylar to use the executable that it was run with to run the extra script.
|
||||
shell_cmd = sys.executable
|
||||
|
||||
currentScriptName = shell_cmd + ' ' + str(mylar.PRE_SCRIPTS).decode("string_escape")
|
||||
currentScriptName = shell_cmd + ' ' + str(mylar.CONFIG.PRE_SCRIPTS).decode("string_escape")
|
||||
logger.fdebug("pre script detected...enabling: " + str(currentScriptName))
|
||||
# generate a safe command line string to execute the script and provide all the parameters
|
||||
script_cmd = shlex.split(currentScriptName, posix=False) + [str(nzb_name), str(nzb_folder), str(seriesmetadata)]
|
||||
|
@ -131,13 +131,13 @@ class PostProcessor(object):
|
|||
"""
|
||||
logger.fdebug("initiating extra script detection.")
|
||||
self._log("initiating extra script detection.")
|
||||
logger.fdebug("mylar.EXTRA_SCRIPTS : " + mylar.EXTRA_SCRIPTS)
|
||||
self._log("mylar.EXTRA_SCRIPTS : " + mylar.EXTRA_SCRIPTS)
|
||||
# for curScriptName in mylar.EXTRA_SCRIPTS:
|
||||
with open(mylar.EXTRA_SCRIPTS, 'r') as f:
|
||||
logger.fdebug("mylar.EXTRA_SCRIPTS : " + mylar.CONFIG.EXTRA_SCRIPTS)
|
||||
self._log("mylar.EXTRA_SCRIPTS : " + mylar.CONFIG.EXTRA_SCRIPTS)
|
||||
# for curScriptName in mylar.CONFIG.EXTRA_SCRIPTS:
|
||||
with open(mylar.CONFIG.EXTRA_SCRIPTS, 'r') as f:
|
||||
first_line = f.readline()
|
||||
|
||||
if mylar.EXTRA_SCRIPTS.endswith('.sh'):
|
||||
if mylar.CONFIG.EXTRA_SCRIPTS.endswith('.sh'):
|
||||
shell_cmd = re.sub('#!', '', first_line)
|
||||
if shell_cmd == '' or shell_cmd is None:
|
||||
shell_cmd = '/bin/bash'
|
||||
|
@ -145,7 +145,7 @@ class PostProcessor(object):
|
|||
#forces mylar to use the executable that it was run with to run the extra script.
|
||||
shell_cmd = sys.executable
|
||||
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.EXTRA_SCRIPTS).decode("string_escape")
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.CONFIG.EXTRA_SCRIPTS).decode("string_escape")
|
||||
logger.fdebug("extra script detected...enabling: " + str(curScriptName))
|
||||
# generate a safe command line string to execute the script and provide all the parameters
|
||||
script_cmd = shlex.split(curScriptName) + [str(nzb_name), str(nzb_folder), str(filen), str(folderp), str(seriesmetadata)]
|
||||
|
@ -170,26 +170,26 @@ class PostProcessor(object):
|
|||
path_to_move = dupeinfo['to_dupe']
|
||||
file_to_move = os.path.split(path_to_move)[1]
|
||||
|
||||
if dupeinfo['action'] == 'dupe_src' and mylar.FILE_OPTS == 'move':
|
||||
if dupeinfo['action'] == 'dupe_src' and mylar.CONFIG.FILE_OPTS == 'move':
|
||||
logger.info('[DUPLICATE-CLEANUP] New File will be post-processed. Moving duplicate [' + path_to_move + '] to Duplicate Dump Folder for manual intervention.')
|
||||
else:
|
||||
if mylar.FILE_OPTS == 'move':
|
||||
if mylar.CONFIG.FILE_OPTS == 'move':
|
||||
logger.info('[DUPLICATE-CLEANUP][MOVE-MODE] New File will not be post-processed. Moving duplicate [' + path_to_move + '] to Duplicate Dump Folder for manual intervention.')
|
||||
else:
|
||||
logger.info('[DUPLICATE-CLEANUP][COPY-MODE] NEW File will not be post-processed. Retaining file in original location [' + path_to_move + ']')
|
||||
return True
|
||||
|
||||
#this gets tricky depending on if it's the new filename or the existing filename, and whether or not 'copy' or 'move' has been selected.
|
||||
if mylar.FILE_OPTS == 'move':
|
||||
if mylar.CONFIG.FILE_OPTS == 'move':
|
||||
#check to make sure duplicate_dump directory exists:
|
||||
checkdirectory = filechecker.validateAndCreateDirectory(mylar.DUPLICATE_DUMP, True, module='[DUPLICATE-CLEANUP]')
|
||||
checkdirectory = filechecker.validateAndCreateDirectory(mylar.CONFIG.DUPLICATE_DUMP, True, module='[DUPLICATE-CLEANUP]')
|
||||
try:
|
||||
shutil.move(path_to_move, os.path.join(mylar.DUPLICATE_DUMP, file_to_move))
|
||||
shutil.move(path_to_move, os.path.join(mylar.CONFIG.DUPLICATE_DUMP, file_to_move))
|
||||
except (OSError, IOError):
|
||||
logger.warn('[DUPLICATE-CLEANUP] Failed to move ' + path_to_move + ' ... to ... ' + os.path.join(mylar.DUPLICATE_DUMP, file_to_move))
|
||||
logger.warn('[DUPLICATE-CLEANUP] Failed to move ' + path_to_move + ' ... to ... ' + os.path.join(mylar.CONFIG.DUPLICATE_DUMP, file_to_move))
|
||||
return False
|
||||
|
||||
logger.warn('[DUPLICATE-CLEANUP] Successfully moved ' + path_to_move + ' ... to ... ' + os.path.join(mylar.DUPLICATE_DUMP, file_to_move))
|
||||
logger.warn('[DUPLICATE-CLEANUP] Successfully moved ' + path_to_move + ' ... to ... ' + os.path.join(mylar.CONFIG.DUPLICATE_DUMP, file_to_move))
|
||||
return True
|
||||
|
||||
def tidyup(self, odir=None, del_nzbdir=False, sub_path=None):
|
||||
|
@ -198,7 +198,7 @@ class PostProcessor(object):
|
|||
# copy = cleanup/delete cache location (odir) only if enabled.
|
||||
#tidyup old path
|
||||
try:
|
||||
logger.fdebug('File Option: ' + mylar.FILE_OPTS + ' [META-ENABLED: ' + str(mylar.ENABLE_META) + ']')
|
||||
logger.fdebug('File Option: ' + mylar.CONFIG.FILE_OPTS + ' [META-ENABLED: ' + str(mylar.CONFIG.ENABLE_META) + ']')
|
||||
logger.fdebug('odir: ' + odir + ' [self.nzb_folder: ' + self.nzb_folder + ']')
|
||||
#if sub_path exists, then we need to use that in place of self.nzb_folder since the file was in a sub-directory within self.nzb_folder
|
||||
if all([sub_path is not None, sub_path != self.nzb_folder]):
|
||||
|
@ -216,7 +216,7 @@ class PostProcessor(object):
|
|||
#if all([os.path.isdir(odir), self.nzb_folder != tmp_folder]) or any([odir.startswith('mylar_'),del_nzbdir is True]):
|
||||
# check to see if the directory is empty or not.
|
||||
|
||||
if all([mylar.FILE_OPTS == 'move', self.nzb_name == 'Manual Run', tmp_folder != self.nzb_folder]):
|
||||
if all([mylar.CONFIG.FILE_OPTS == 'move', self.nzb_name == 'Manual Run', tmp_folder != self.nzb_folder]):
|
||||
if not os.listdir(tmp_folder):
|
||||
logger.fdebug(self.module + ' Tidying up. Deleting sub-folder location : ' + tmp_folder)
|
||||
shutil.rmtree(tmp_folder)
|
||||
|
@ -225,7 +225,7 @@ class PostProcessor(object):
|
|||
self._log('Failed to remove temporary directory: ' + tmp_folder)
|
||||
logger.error(self.module + ' ' + tmp_folder + ' not empty. Skipping removal of directory - this will either be caught in further post-processing or it will have to be manually deleted.')
|
||||
|
||||
elif mylar.FILE_OPTS == 'move' and all([del_nzbdir is True, self.nzb_name != 'Manual Run']): #tmp_folder != self.nzb_folder]):
|
||||
elif mylar.CONFIG.FILE_OPTS == 'move' and all([del_nzbdir is True, self.nzb_name != 'Manual Run']): #tmp_folder != self.nzb_folder]):
|
||||
if not os.listdir(tmp_folder):
|
||||
logger.fdebug(self.module + ' Tidying up. Deleting original folder location : ' + tmp_folder)
|
||||
shutil.rmtree(tmp_folder)
|
||||
|
@ -234,7 +234,7 @@ class PostProcessor(object):
|
|||
self._log('Failed to remove temporary directory: ' + tmp_folder)
|
||||
logger.error(self.module + ' ' + tmp_folder + ' not empty. Skipping removal of directory - this will either be caught in further post-processing or it will have to be manually deleted.')
|
||||
|
||||
if mylar.ENABLE_META and all([os.path.isdir(odir), 'mylar_' in odir]):
|
||||
if mylar.CONFIG.ENABLE_META and all([os.path.isdir(odir), 'mylar_' in odir]):
|
||||
#Regardless of the copy/move operation, we need to delete the files from within the cache directory, then remove the cache directory itself for the given issue.
|
||||
#sometimes during a meta, it retains the cbr as well after conversion depending on settings. Make sure to delete too thus the 'walk'.
|
||||
for filename in os.listdir(odir):
|
||||
|
@ -269,14 +269,14 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Not using SABnzbd : Manual Run')
|
||||
else:
|
||||
# if the SAB Directory option is enabled, let's use that folder name and append the jobname.
|
||||
if all([mylar.SAB_TO_MYLAR, mylar.SAB_DIRECTORY is not None, mylar.SAB_DIRECTORY != 'None', len(mylar.SAB_DIRECTORY) > 4]):
|
||||
self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
if all([mylar.CONFIG.SAB_TO_MYLAR, mylar.CONFIG.SAB_DIRECTORY is not None, mylar.CONFIG.SAB_DIRECTORY != 'None']):
|
||||
self.nzb_folder = os.path.join(mylar.CONFIG.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
logger.fdebug(module + ' SABnzbd Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
|
||||
# -- start. not used.
|
||||
#query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
|
||||
#http://localhost:8080/sabnzbd/api?mode=set_config§ion=misc&keyword=dirscan_speed&value=5
|
||||
#querysab = str(mylar.SAB_HOST) + "/api?mode=get_config§ion=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY)
|
||||
#querysab = str(mylar.CONFIG.SAB_HOST) + "/api?mode=get_config§ion=misc&output=xml&apikey=" + str(mylar.CONFIG.SAB_APIKEY)
|
||||
#logger.info("querysab_string:" + str(querysab))
|
||||
#file = urllib2.urlopen(querysab)
|
||||
#data = file.read()
|
||||
|
@ -301,9 +301,9 @@ class PostProcessor(object):
|
|||
# if the NZBGet Directory option is enabled, let's use that folder name and append the jobname.
|
||||
if self.nzb_name == 'Manual Run':
|
||||
logger.fdebug(module + ' Manual Run Post-Processing enabled.')
|
||||
elif mylar.NZBGET_DIRECTORY is not None and mylar.NZBGET_DIRECTORY is not 'None' and len(mylar.NZBGET_DIRECTORY) > 4:
|
||||
elif all([mylar.CONFIG.NZBGET_DIRECTORY is not None, mylar.CONFIG.NZBGET_DIRECTORY is not 'None']):
|
||||
logger.fdebug(module + ' NZB name as passed from NZBGet: ' + self.nzb_name)
|
||||
self.nzb_folder = os.path.join(mylar.NZBGET_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
self.nzb_folder = os.path.join(mylar.CONFIG.NZBGET_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
logger.fdebug(module + ' NZBGET Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
myDB = db.DBConnection()
|
||||
|
||||
|
@ -322,7 +322,7 @@ class PostProcessor(object):
|
|||
|
||||
#load the hashes for torrents so continual post-processing of same issues don't occur.
|
||||
pp_crclist = []
|
||||
if mylar.ENABLE_TORRENTS:
|
||||
if mylar.CONFIG.ENABLE_TORRENTS:
|
||||
pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE a.Status='Post-Processed' and a.crc is not NULL and (b.Status='Downloaded' or b.status='Archived ORDER BY b.IssueDate')")
|
||||
for pp in pp_crc:
|
||||
pp_crclist.append({'IssueID': pp['IssueID'],
|
||||
|
@ -343,7 +343,7 @@ class PostProcessor(object):
|
|||
manual_arclist = []
|
||||
|
||||
for fl in filelist['comiclist']:
|
||||
if mylar.ENABLE_TORRENTS:
|
||||
if mylar.CONFIG.ENABLE_TORRENTS:
|
||||
crcchk = None
|
||||
tcrc = helpers.crc(os.path.join(fl['comiclocation'], fl['comicfilename'].decode(mylar.SYS_ENCODING)))
|
||||
crcchk = [x for x in pp_crclist if tcrc == x['crc']]
|
||||
|
@ -364,7 +364,7 @@ class PostProcessor(object):
|
|||
if not any(re.sub('[\|\s]', '', cname.lower()) == x for x in loopchk):
|
||||
loopchk.append(re.sub('[\|\s]', '', cname.lower()))
|
||||
|
||||
if all([mylar.ANNUALS_ON, 'annual' in mod_seriesname.lower()]):
|
||||
if all([mylar.CONFIG.ANNUALS_ON, 'annual' in mod_seriesname.lower()]):
|
||||
mod_seriesname = re.sub('annual', '', mod_seriesname, flags=re.I).strip()
|
||||
|
||||
#make sure we add back in the original parsed filename here.
|
||||
|
@ -394,7 +394,7 @@ class PostProcessor(object):
|
|||
wv_comicversion = wv['ComicVersion']
|
||||
wv_publisher = wv['ComicPublisher']
|
||||
wv_total = wv['Total']
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('Queuing to Check: ' + wv['ComicName'] + ' [' + str(wv['ComicYear']) + '] -- ' + str(wv['ComicID']))
|
||||
|
||||
#force it to use the Publication Date of the latest issue instead of the Latest Date (which could be anything)
|
||||
|
@ -658,21 +658,22 @@ class PostProcessor(object):
|
|||
i = 0
|
||||
#k is ComicName
|
||||
#v is ArcValues and WatchValues
|
||||
if len(v) > 0:
|
||||
logger.fdebug('%s Now Checking if %s issue(s) may also reside in one of the storyarc\'s that I am watching.' % (module, len(v)))
|
||||
while i < len(v):
|
||||
if k is None or k == 'None':
|
||||
pass
|
||||
else:
|
||||
logger.fdebug(module + ' Now Checking if the issue also resides in one of the storyarc\'s that I am watching.')
|
||||
arcm = filechecker.FileChecker(watchcomic=k, Publisher=v[i]['ArcValues']['ComicPublisher'], manual=v[i]['WatchValues'])
|
||||
arcmatch = arcm.matchIT(fl)
|
||||
logger.info('arcmatch: ' + str(arcmatch))
|
||||
#logger.fdebug('arcmatch: ' + str(arcmatch))
|
||||
if arcmatch['process_status'] == 'fail':
|
||||
nm+=1
|
||||
else:
|
||||
temploc= arcmatch['justthedigits'].replace('_', ' ')
|
||||
temploc = re.sub('[\#\']', '', temploc)
|
||||
if helpers.issuedigits(temploc) != helpers.issuedigits(v[i]['ArcValues']['IssueNumber']):
|
||||
logger.info('issues dont match. Skipping')
|
||||
#logger.fdebug('issues dont match. Skipping')
|
||||
i+=1
|
||||
continue
|
||||
if 'annual' in temploc.lower():
|
||||
|
@ -702,8 +703,8 @@ class PostProcessor(object):
|
|||
logger.fdebug('issuedate:' + str(issuechk['IssueDate']))
|
||||
logger.fdebug('issuechk: ' + str(issuechk['IssueDate'][5:7]))
|
||||
|
||||
logger.info('StoreDate ' + str(issuechk['StoreDate']))
|
||||
logger.info('IssueDate: ' + str(issuechk['IssueDate']))
|
||||
logger.fdebug('StoreDate ' + str(issuechk['StoreDate']))
|
||||
logger.fdebug('IssueDate: ' + str(issuechk['IssueDate']))
|
||||
if all([issuechk['StoreDate'] is not None, issuechk['StoreDate'] != '0000-00-00']) or all([issuechk['IssueDate'] is not None, issuechk['IssueDate'] != '0000-00-00']):
|
||||
if issuechk['StoreDate'] == '0000-00-00':
|
||||
datevalue = issuechk['IssueDate']
|
||||
|
@ -734,9 +735,9 @@ class PostProcessor(object):
|
|||
else:
|
||||
logger.info(module + ' Found matching issue # ' + str(fcdigit) + ' for ComicID: ' + str(v[i]['WatchValues']['ComicID']) + ' / IssueID: ' + str(issuechk['IssueID']))
|
||||
|
||||
logger.info('datematch: ' + str(datematch))
|
||||
logger.info('temploc: ' + str(helpers.issuedigits(temploc)))
|
||||
logger.info('arcissue: ' + str(helpers.issuedigits(v[i]['ArcValues']['IssueNumber'])))
|
||||
logger.fdebug('datematch: ' + str(datematch))
|
||||
logger.fdebug('temploc: ' + str(helpers.issuedigits(temploc)))
|
||||
logger.fdebug('arcissue: ' + str(helpers.issuedigits(v[i]['ArcValues']['IssueNumber'])))
|
||||
if datematch == "True" and helpers.issuedigits(temploc) == helpers.issuedigits(v[i]['ArcValues']['IssueNumber']):
|
||||
passit = False
|
||||
if len(manual_list) > 0:
|
||||
|
@ -771,7 +772,7 @@ class PostProcessor(object):
|
|||
"Publisher": arcpublisher,
|
||||
"ReadingOrder": v[i]['ArcValues']['ReadingOrder'],
|
||||
"ComicName": k})
|
||||
logger.fdebug(module + '[SUCCESSFUL MATCH: ' + k + '-' + v[i]['WatchValues']['ComicID'] + '] Match verified for ' + arcmatch['comicfilename'])
|
||||
logger.info(module + '[SUCCESSFUL MATCH: ' + k + '-' + v[i]['WatchValues']['ComicID'] + '] Match verified for ' + arcmatch['comicfilename'])
|
||||
break
|
||||
else:
|
||||
logger.fdebug(module + '[NON-MATCH: ' + k + '-' + v[i]['WatchValues']['ComicID'] + '] Incorrect series - not populating..continuing post-processing')
|
||||
|
@ -795,7 +796,7 @@ class PostProcessor(object):
|
|||
|
||||
crcvalue = helpers.crc(ofilename)
|
||||
|
||||
if mylar.ENABLE_META:
|
||||
if mylar.CONFIG.ENABLE_META:
|
||||
logger.info('[STORY-ARC POST-PROCESSING] Metatagging enabled - proceeding...')
|
||||
try:
|
||||
import cmtagmylar
|
||||
|
@ -843,14 +844,14 @@ class PostProcessor(object):
|
|||
return self.queue.put(self.valreturn)
|
||||
|
||||
#send to renamer here if valid.
|
||||
if mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.RENAME_FILES:
|
||||
renamed_file = helpers.rename_param(ml['ComicID'], ml['ComicName'], ml['IssueNumber'], dfilename, issueid=ml['IssueID'], arc=ml['StoryArc'])
|
||||
if renamed_file:
|
||||
dfilename = renamed_file['nfilename']
|
||||
logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename)
|
||||
|
||||
#if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
|
||||
if mylar.READ2FILENAME:
|
||||
if mylar.CONFIG.READ2FILENAME:
|
||||
|
||||
logger.fdebug(module + ' readingorder#: ' + str(ml['ReadingOrder']))
|
||||
if int(ml['ReadingOrder']) < 10: readord = "00" + str(ml['ReadingOrder'])
|
||||
|
@ -863,7 +864,7 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Destination Path : ' + grab_dst)
|
||||
logger.fdebug(module + ' Source Path : ' + grab_src)
|
||||
|
||||
logger.info(module + '[ONE-OFF MODE][' + mylar.ARC_FILEOPS.upper() + '] ' + grab_src + ' into directory : ' + grab_dst)
|
||||
logger.info(module + '[ONE-OFF MODE][' + mylar.CONFIG.ARC_FILEOPS.upper() + '] ' + grab_src + ' into directory : ' + grab_dst)
|
||||
#this is also for issues that are part of a story arc, and don't belong to a watchlist series (ie. one-off's)
|
||||
|
||||
try:
|
||||
|
@ -871,11 +872,11 @@ class PostProcessor(object):
|
|||
if not fileoperation:
|
||||
raise OSError
|
||||
except (OSError, IOError):
|
||||
logger.fdebug(module + '[ONE-OFF MODE][' + mylar.ARC_FILEOPS.upper() + '] Failure ' + grab_src + ' - check directories and manually re-run.')
|
||||
logger.fdebug(module + '[ONE-OFF MODE][' + mylar.CONFIG.ARC_FILEOPS.upper() + '] Failure ' + grab_src + ' - check directories and manually re-run.')
|
||||
return
|
||||
|
||||
#tidyup old path
|
||||
if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']):
|
||||
if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
|
||||
self.tidyup(src_location, True)
|
||||
|
||||
#delete entry from nzblog table
|
||||
|
@ -898,7 +899,7 @@ class PostProcessor(object):
|
|||
#one-off manual pp'd of torrents
|
||||
oneofflist = myDB.select("select s.Issue_Number, s.ComicName, s.IssueID, s.ComicID, s.Provider, w.PUBLISHER, w.weeknumber, w.year from snatched as s inner join nzblog as n on s.IssueID = n.IssueID and s.Hash is not NULL inner join weekly as w on s.IssueID = w.IssueID WHERE (s.Provider ='32P' or s.Provider='TPSE' or s.Provider='WWT' or s.Provider='DEM') AND n.OneOff == 1;")
|
||||
if oneofflist is None:
|
||||
logger.fdebug(module + ' No one-off\'s have ever been snatched using Mylar.')
|
||||
logger.fdebug(module + ' No one-off\'s have ever been snatched using mylar.')
|
||||
else:
|
||||
oneoffvals = []
|
||||
oneoff_issuelist = []
|
||||
|
@ -1026,6 +1027,7 @@ class PostProcessor(object):
|
|||
comicname = None
|
||||
issuenumber = None
|
||||
if tmpiss is not None:
|
||||
logger.info('shouldnt be here')
|
||||
ppinfo.append({'comicid': tmpiss['ComicID'],
|
||||
'issueid': issueid,
|
||||
'comicname': tmpiss['ComicName'],
|
||||
|
@ -1036,12 +1038,14 @@ class PostProcessor(object):
|
|||
'oneoff': self.oneoff})
|
||||
|
||||
elif all([self.oneoff is not None, issueid[0] == 'S']):
|
||||
logger.info('should be here')
|
||||
issuearcid = re.sub('S', '', issueid).strip()
|
||||
oneinfo = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone()
|
||||
if oneinfo is None:
|
||||
logger.warn('Unable to locate issue as previously snatched arc issue - it might be something else...')
|
||||
self._log('Unable to locate issue as previously snatched arc issue - it might be something else...')
|
||||
else:
|
||||
logger.info('adding stuff')
|
||||
ppinfo.append({'comicid': oneinfo['ComicID'],
|
||||
'comicname': oneinfo['ComicName'],
|
||||
'issuenumber': oneinfo['IssueNumber'],
|
||||
|
@ -1053,7 +1057,7 @@ class PostProcessor(object):
|
|||
self.oneoff = True
|
||||
|
||||
|
||||
if all([len(ppinfo) == 0, self.oneoff is not None, mylar.ALT_PULL == 2]):
|
||||
if all([len(ppinfo) == 0, self.oneoff is not None, mylar.CONFIG.ALT_PULL == 2]):
|
||||
oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone()
|
||||
if oneinfo is None:
|
||||
oneinfo = myDB.selectone('SELECT * FROM oneoffhistory WHERE IssueID=?', [issueid]).fetchone()
|
||||
|
@ -1153,8 +1157,8 @@ class PostProcessor(object):
|
|||
# else:
|
||||
# self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.")
|
||||
# logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.')
|
||||
# self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR)
|
||||
# grdst = mylar.GRABBAG_DIR
|
||||
# self._log("Grab-Bag Directory set to : " + mylar.CONFIG.GRABBAG_DIR)
|
||||
# grdst = mylar.CONFIG.GRABBAG_DIR
|
||||
#
|
||||
# odir = None
|
||||
# ofilename = None
|
||||
|
@ -1208,7 +1212,7 @@ class PostProcessor(object):
|
|||
#
|
||||
# #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging.
|
||||
# #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes.
|
||||
# if all([mylar.ENABLE_META, issueid is not None]):
|
||||
# if all([mylar.CONFIG.ENABLE_META, issueid is not None]):
|
||||
# self._log("Metatagging enabled - proceeding...")
|
||||
# try:
|
||||
# import cmtagmylar
|
||||
|
@ -1252,7 +1256,7 @@ class PostProcessor(object):
|
|||
# return self.queue.put(self.valreturn)
|
||||
#
|
||||
# #send to renamer here if valid.
|
||||
# if mylar.RENAME_FILES:
|
||||
# if mylar.CONFIG.RENAME_FILES:
|
||||
# renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc)
|
||||
# if renamed_file:
|
||||
# dfilename = renamed_file['nfilename']
|
||||
|
@ -1261,7 +1265,7 @@ class PostProcessor(object):
|
|||
#
|
||||
# if sandwich is not None and 'S' in sandwich:
|
||||
# #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
|
||||
# if mylar.READ2FILENAME:
|
||||
# if mylar.CONFIG.READ2FILENAME:
|
||||
# logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder']))
|
||||
# if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder'])
|
||||
# elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder'])
|
||||
|
@ -1277,19 +1281,19 @@ class PostProcessor(object):
|
|||
#
|
||||
# logger.info(module + ' Destination Path : ' + grab_dst)
|
||||
#
|
||||
# logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst))
|
||||
# logger.info(module + '[' + mylar.CONFIG.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst))
|
||||
#
|
||||
# try:
|
||||
# fileoperation = helpers.file_ops(grab_src, grab_dst)
|
||||
# if not fileoperation:
|
||||
# raise OSError
|
||||
# except (OSError, IOError):
|
||||
# logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
# self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
# logger.fdebug(module + ' Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
# self._log("Failed to " + mylar.CONFIG.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
# return
|
||||
#
|
||||
# #tidyup old path
|
||||
# if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']):
|
||||
# if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
|
||||
# self.tidyup(src_location, True)
|
||||
#
|
||||
# #delete entry from nzblog table
|
||||
|
@ -1365,7 +1369,7 @@ class PostProcessor(object):
|
|||
#check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
|
||||
#'dupe_file' - do not write new file as existing file is better quality
|
||||
#'dupe_src' - write new file, as existing file is a lesser quality (dupe)
|
||||
if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP
|
||||
if mylar.CONFIG.DDUMP and not all([mylar.CONFIG.DUPLICATE_DUMP is None, mylar.CONFIG.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP
|
||||
dupchkit = self.duplicate_process(dupthis)
|
||||
if dupchkit == False:
|
||||
logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
|
||||
|
@ -1391,8 +1395,8 @@ class PostProcessor(object):
|
|||
# #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
|
||||
# #'dupe_file' - do not write new file as existing file is better quality
|
||||
# #'dupe_src' - write new file, as existing file is a lesser quality (dupe)
|
||||
# if mylar.DUPLICATE_DUMP:
|
||||
# if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']):
|
||||
# if mylar.CONFIG.DUPLICATE_DUMP:
|
||||
# if mylar.CONFIG.DDUMP and not all([mylar.CONFIG.DUPLICATE_DUMP is None, mylar.CONFIG.DUPLICATE_DUMP == '']):
|
||||
# dupchkit = self.duplicate_process(dupthis)
|
||||
# if dupchkit == False:
|
||||
# logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
|
||||
|
@ -1415,6 +1419,7 @@ class PostProcessor(object):
|
|||
module = self.module
|
||||
myDB = db.DBConnection()
|
||||
if manual is None:
|
||||
sandwich = None
|
||||
issueid = tinfo['issueid']
|
||||
comicid = tinfo['comicid']
|
||||
comicname = tinfo['comicname']
|
||||
|
@ -1470,8 +1475,8 @@ class PostProcessor(object):
|
|||
else:
|
||||
self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.")
|
||||
logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.')
|
||||
self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR)
|
||||
grdst = mylar.GRABBAG_DIR
|
||||
self._log("Grab-Bag Directory set to : " + mylar.CONFIG.GRABBAG_DIR)
|
||||
grdst = mylar.CONFIG.GRABBAG_DIR
|
||||
|
||||
odir = location
|
||||
|
||||
|
@ -1531,7 +1536,7 @@ class PostProcessor(object):
|
|||
|
||||
#if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging.
|
||||
#if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes.
|
||||
if all([mylar.ENABLE_META, issueid is not None]):
|
||||
if all([mylar.CONFIG.ENABLE_META, issueid is not None]):
|
||||
self._log("Metatagging enabled - proceeding...")
|
||||
try:
|
||||
import cmtagmylar
|
||||
|
@ -1574,7 +1579,7 @@ class PostProcessor(object):
|
|||
return self.queue.put(self.valreturn)
|
||||
|
||||
#send to renamer here if valid.
|
||||
if mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.RENAME_FILES:
|
||||
renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc)
|
||||
if renamed_file:
|
||||
dfilename = renamed_file['nfilename']
|
||||
|
@ -1582,7 +1587,7 @@ class PostProcessor(object):
|
|||
|
||||
if sandwich is not None and 'S' in sandwich:
|
||||
#if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
|
||||
if mylar.READ2FILENAME:
|
||||
if mylar.CONFIG.READ2FILENAME:
|
||||
logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder']))
|
||||
if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder'])
|
||||
elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder'])
|
||||
|
@ -1597,19 +1602,19 @@ class PostProcessor(object):
|
|||
self._log("Destination Path : " + grab_dst)
|
||||
|
||||
logger.info(module + ' Destination Path : ' + grab_dst)
|
||||
logger.info(module + '[' + mylar.FILE_OPTS + '] ' + ofilename + ' into directory : ' + grab_dst)
|
||||
logger.info(module + '[' + mylar.CONFIG.FILE_OPTS + '] ' + ofilename + ' into directory : ' + grab_dst)
|
||||
|
||||
try:
|
||||
fileoperation = helpers.file_ops(grab_src, grab_dst)
|
||||
if not fileoperation:
|
||||
raise OSError
|
||||
except (OSError, IOError):
|
||||
logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
logger.fdebug(module + ' Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
self._log("Failed to " + mylar.CONFIG.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
return
|
||||
|
||||
#tidyup old path
|
||||
if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']):
|
||||
if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
|
||||
self.tidyup(src_location, True)
|
||||
|
||||
#delete entry from nzblog table
|
||||
|
@ -1688,7 +1693,7 @@ class PostProcessor(object):
|
|||
#check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
|
||||
#'dupe_file' - do not write new file as existing file is better quality
|
||||
#'dupe_src' - write new file, as existing file is a lesser quality (dupe)
|
||||
if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP
|
||||
if mylar.CONFIG.DDUMP and not all([mylar.CONFIG.DUPLICATE_DUMP is None, mylar.CONFIG.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP
|
||||
dupchkit = self.duplicate_process(dupthis)
|
||||
if dupchkit == False:
|
||||
logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
|
||||
|
@ -1714,8 +1719,8 @@ class PostProcessor(object):
|
|||
#check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
|
||||
#'dupe_file' - do not write new file as existing file is better quality
|
||||
#'dupe_src' - write new file, as existing file is a lesser quality (dupe)
|
||||
if mylar.DUPLICATE_DUMP:
|
||||
if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']):
|
||||
if mylar.CONFIG.DUPLICATE_DUMP:
|
||||
if mylar.CONFIG.DDUMP and not all([mylar.CONFIG.DUPLICATE_DUMP is None, mylar.CONFIG.DUPLICATE_DUMP == '']):
|
||||
dupchkit = self.duplicate_process(dupthis)
|
||||
if dupchkit == False:
|
||||
logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
|
||||
|
@ -1743,7 +1748,7 @@ class PostProcessor(object):
|
|||
myDB = db.DBConnection()
|
||||
comicnzb = myDB.selectone("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
|
||||
issuenzb = myDB.selectone("SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL", [issueid, comicid]).fetchone()
|
||||
if ml is not None and mylar.SNATCHEDTORRENT_NOTIFY:
|
||||
if ml is not None and mylar.CONFIG.SNATCHEDTORRENT_NOTIFY:
|
||||
snatchnzb = myDB.selectone("SELECT * from snatched WHERE IssueID=? AND ComicID=? AND (provider=? OR provider=? OR provider=? OR provider=?) AND Status='Snatched'", [issueid, comicid, 'TPSE', 'DEM', 'WWT', '32P']).fetchone()
|
||||
if snatchnzb is None:
|
||||
logger.fdebug(module + ' Was not downloaded with Mylar and the usage of torrents. Disabling torrent manual post-processing completion notification.')
|
||||
|
@ -1831,14 +1836,14 @@ class PostProcessor(object):
|
|||
issueno = iss
|
||||
|
||||
# issue zero-suppression here
|
||||
if mylar.ZERO_LEVEL == "0":
|
||||
if mylar.CONFIG.ZERO_LEVEL == "0":
|
||||
zeroadd = ""
|
||||
else:
|
||||
if mylar.ZERO_LEVEL_N == "none": zeroadd = ""
|
||||
elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
|
||||
elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"
|
||||
if mylar.CONFIG.ZERO_LEVEL_N == "none": zeroadd = ""
|
||||
elif mylar.CONFIG.ZERO_LEVEL_N == "0x": zeroadd = "0"
|
||||
elif mylar.CONFIG.ZERO_LEVEL_N == "00x": zeroadd = "00"
|
||||
|
||||
logger.fdebug(module + ' Zero Suppression set to : ' + str(mylar.ZERO_LEVEL_N))
|
||||
logger.fdebug(module + ' Zero Suppression set to : ' + str(mylar.CONFIG.ZERO_LEVEL_N))
|
||||
|
||||
prettycomiss = None
|
||||
|
||||
|
@ -1876,10 +1881,10 @@ class PostProcessor(object):
|
|||
prettycomiss = str(zeroadd) + str(iss)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
elif int(issueno) >= 10 and int(issueno) < 100:
|
||||
logger.fdebug('issue detected greater than 10, but less than 100')
|
||||
if mylar.ZERO_LEVEL_N == "none":
|
||||
if mylar.CONFIG.ZERO_LEVEL_N == "none":
|
||||
zeroadd = ""
|
||||
else:
|
||||
zeroadd = "0"
|
||||
|
@ -1893,7 +1898,7 @@ class PostProcessor(object):
|
|||
prettycomiss = str(zeroadd) + str(iss)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '.Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '.Issue will be set as : ' + str(prettycomiss))
|
||||
else:
|
||||
logger.fdebug('issue detected greater than 100')
|
||||
if '.' in iss:
|
||||
|
@ -1902,7 +1907,7 @@ class PostProcessor(object):
|
|||
prettycomiss = str(issueno)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
|
||||
elif len(str(issueno)) == 0:
|
||||
prettycomiss = str(issueno)
|
||||
|
@ -1929,10 +1934,10 @@ class PostProcessor(object):
|
|||
# prettycomiss = str(zeroadd) + str(iss)
|
||||
# if issue_except != 'None':
|
||||
# prettycomiss = str(prettycomiss) + issue_except
|
||||
# self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
|
||||
# self._log("Zero level supplement set to " + str(mylar.CONFIG.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
|
||||
# elif int(issueno) >= 10 and int(issueno) < 100:
|
||||
# self._log("issue detected greater than 10, but less than 100")
|
||||
# if mylar.ZERO_LEVEL_N == "none":
|
||||
# if mylar.CONFIG.ZERO_LEVEL_N == "none":
|
||||
# zeroadd = ""
|
||||
# else:
|
||||
# zeroadd = "0"
|
||||
|
@ -1946,7 +1951,7 @@ class PostProcessor(object):
|
|||
# prettycomiss = str(zeroadd) + str(iss)
|
||||
# if issue_except != 'None':
|
||||
# prettycomiss = str(prettycomiss) + issue_except
|
||||
# self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss))
|
||||
# self._log("Zero level supplement set to " + str(mylar.CONFIG.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss))
|
||||
# else:
|
||||
# self._log("issue detected greater than 100")
|
||||
# if '.' in iss:
|
||||
|
@ -1955,7 +1960,7 @@ class PostProcessor(object):
|
|||
# prettycomiss = str(issueno)
|
||||
# if issue_except != 'None':
|
||||
# prettycomiss = str(prettycomiss) + issue_except
|
||||
# self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
|
||||
# self._log("Zero level supplement set to " + str(mylar.CONFIG.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
|
||||
# else:
|
||||
# prettycomiss = str(issueno)
|
||||
# self._log("issue length error - cannot determine length. Defaulting to None: " + str(prettycomiss))
|
||||
|
@ -1997,14 +2002,14 @@ class PostProcessor(object):
|
|||
comversion = 'None'
|
||||
#if comversion is None, remove it so it doesn't populate with 'None'
|
||||
if comversion == 'None':
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.CONFIG.FILE_FORMAT)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
self._log("No version # found for series - tag will not be available for renaming.")
|
||||
logger.fdebug(module + ' No version # found for series, removing from filename')
|
||||
logger.fdebug('%s New format is now: %s' % (module, chunk_file_format))
|
||||
else:
|
||||
chunk_file_format = mylar.FILE_FORMAT
|
||||
chunk_file_format = mylar.CONFIG.FILE_FORMAT
|
||||
|
||||
if annchk == "no":
|
||||
chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
|
||||
|
@ -2069,12 +2074,12 @@ class PostProcessor(object):
|
|||
|
||||
|
||||
#tag the meta.
|
||||
if mylar.ENABLE_META:
|
||||
if mylar.CONFIG.ENABLE_META:
|
||||
|
||||
self._log("Metatagging enabled - proceeding...")
|
||||
logger.fdebug(module + ' Metatagging enabled - proceeding...')
|
||||
pcheck = "pass"
|
||||
if mylar.CMTAG_START_YEAR_AS_VOLUME:
|
||||
if mylar.CONFIG.CMTAG_START_YEAR_AS_VOLUME:
|
||||
vol_label = seriesyear
|
||||
else:
|
||||
vol_label = comversion
|
||||
|
@ -2136,7 +2141,7 @@ class PostProcessor(object):
|
|||
#self.fileop = shutil.move
|
||||
#Run Pre-script
|
||||
|
||||
if mylar.ENABLE_PRE_SCRIPTS:
|
||||
if mylar.CONFIG.ENABLE_PRE_SCRIPTS:
|
||||
nzbn = self.nzb_name #original nzb name
|
||||
nzbf = self.nzb_folder #original nzb folder
|
||||
#name, comicyear, comicid , issueid, issueyear, issue, publisher
|
||||
|
@ -2234,7 +2239,7 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Original Filename: ' + ofilename)
|
||||
logger.fdebug(module + ' Original Extension: ' + ext)
|
||||
|
||||
if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.FILE_FORMAT == '' or not mylar.CONFIG.RENAME_FILES:
|
||||
self._log("Rename Files isn't enabled...keeping original filename.")
|
||||
logger.fdebug(module + ' Rename Files is not enabled - keeping original filename.')
|
||||
#check if extension is in nzb_name - will screw up otherwise
|
||||
|
@ -2244,9 +2249,9 @@ class PostProcessor(object):
|
|||
nfilename = ofilename
|
||||
else:
|
||||
nfilename = helpers.replace_all(chunk_file_format, file_values)
|
||||
if mylar.REPLACE_SPACES:
|
||||
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
nfilename = nfilename.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
nfilename = re.sub('[\,\:\?\"\']', '', nfilename)
|
||||
nfilename = re.sub('[\/\*]', '-', nfilename)
|
||||
self._log("New Filename: " + nfilename)
|
||||
|
@ -2263,7 +2268,7 @@ class PostProcessor(object):
|
|||
return self.queue.put(self.valreturn)
|
||||
|
||||
|
||||
if mylar.LOWERCASE_FILENAMES:
|
||||
if mylar.CONFIG.LOWERCASE_FILENAMES:
|
||||
dst = os.path.join(comlocation, (nfilename + ext).lower())
|
||||
else:
|
||||
dst = os.path.join(comlocation, (nfilename + ext.lower()))
|
||||
|
@ -2280,10 +2285,10 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' odir: ' + odir)
|
||||
logger.fdebug(module + ' ofilename:' + ofilename)
|
||||
logger.fdebug(module + ' nfilename:' + nfilename + ext)
|
||||
if mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.RENAME_FILES:
|
||||
if ofilename != (nfilename + ext):
|
||||
logger.fdebug(module + ' Renaming ' + os.path.join(odir, ofilename) + ' ..to.. ' + os.path.join(odir, nfilename + ext))
|
||||
#if mylar.FILE_OPTS == 'move':
|
||||
#if mylar.CONFIG.FILE_OPTS == 'move':
|
||||
# os.rename(os.path.join(odir, ofilename), os.path.join(odir, nfilename + ext))
|
||||
# else:
|
||||
# self.fileop(os.path.join(odir, ofilename), os.path.join(odir, nfilename + ext))
|
||||
|
@ -2293,21 +2298,21 @@ class PostProcessor(object):
|
|||
#src = os.path.join(self.nzb_folder, str(nfilename + ext))
|
||||
src = os.path.join(odir, ofilename)
|
||||
try:
|
||||
self._log("[" + mylar.FILE_OPTS + "] " + src + " - to - " + dst)
|
||||
self._log("[" + mylar.CONFIG.FILE_OPTS + "] " + src + " - to - " + dst)
|
||||
fileoperation = helpers.file_ops(src, dst)
|
||||
if not fileoperation:
|
||||
raise OSError
|
||||
except (OSError, IOError):
|
||||
self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
self._log("Failed to " + mylar.CONFIG.FILE_OPTS + " " + src + " - check directories and manually re-run.")
|
||||
self._log("Post-Processing ABORTED.")
|
||||
logger.warn(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
logger.warn(module + ' Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
logger.warn(module + ' Post-Processing ABORTED')
|
||||
self.valreturn.append({"self.log": self.log,
|
||||
"mode": 'stop'})
|
||||
return self.queue.put(self.valreturn)
|
||||
|
||||
#tidyup old path
|
||||
if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']):
|
||||
if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
|
||||
self.tidyup(odir, True)
|
||||
|
||||
else:
|
||||
|
@ -2315,7 +2320,7 @@ class PostProcessor(object):
|
|||
downtype = 'PP'
|
||||
#Manual Run, this is the portion.
|
||||
src = os.path.join(odir, ofilename)
|
||||
if mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.RENAME_FILES:
|
||||
if ofilename != (nfilename + ext):
|
||||
logger.fdebug(module + ' Renaming ' + os.path.join(odir, ofilename)) #' ..to.. ' + os.path.join(odir, self.nzb_folder, str(nfilename + ext)))
|
||||
#os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir, str(nfilename + ext)))
|
||||
|
@ -2324,30 +2329,30 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Filename is identical as original, not renaming.')
|
||||
|
||||
logger.fdebug(module + ' odir src : ' + src)
|
||||
logger.fdebug(module + '[' + mylar.FILE_OPTS + '] ' + src + ' ... to ... ' + dst)
|
||||
logger.fdebug(module + '[' + mylar.CONFIG.FILE_OPTS + '] ' + src + ' ... to ... ' + dst)
|
||||
try:
|
||||
fileoperation = helpers.file_ops(src, dst)
|
||||
if not fileoperation:
|
||||
raise OSError
|
||||
except (OSError, IOError):
|
||||
logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
logger.fdebug(module + ' Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
|
||||
logger.fdebug(module + ' Post-Processing ABORTED.')
|
||||
self.failed_files +=1
|
||||
self.valreturn.append({"self.log": self.log,
|
||||
"mode": 'stop'})
|
||||
return self.queue.put(self.valreturn)
|
||||
logger.info(module + ' ' + mylar.FILE_OPTS + ' successful to : ' + dst)
|
||||
logger.info(module + ' ' + mylar.CONFIG.FILE_OPTS + ' successful to : ' + dst)
|
||||
|
||||
if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']):
|
||||
if any([mylar.CONFIG.FILE_OPTS == 'move', mylar.CONFIG.FILE_OPTS == 'copy']):
|
||||
self.tidyup(odir, False, subpath)
|
||||
|
||||
#Hopefully set permissions on downloaded file
|
||||
if mylar.ENFORCE_PERMS:
|
||||
if mylar.CONFIG.ENFORCE_PERMS:
|
||||
if mylar.OS_DETECT != 'windows':
|
||||
filechecker.setperms(dst.rstrip())
|
||||
else:
|
||||
try:
|
||||
permission = int(mylar.CHMOD_FILE, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_FILE, 8)
|
||||
os.umask(0)
|
||||
os.chmod(dst.rstrip(), permission)
|
||||
except OSError:
|
||||
|
@ -2355,7 +2360,7 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Continuing post-processing but unable to change file permissions in ' + dst)
|
||||
|
||||
#let's reset the fileop to the original setting just in case it's a manual pp run
|
||||
if mylar.FILE_OPTS == 'copy':
|
||||
if mylar.CONFIG.FILE_OPTS == 'copy':
|
||||
self.fileop = shutil.copy
|
||||
else:
|
||||
self.fileop = shutil.move
|
||||
|
@ -2400,7 +2405,7 @@ class PostProcessor(object):
|
|||
"mode": 'stop'})
|
||||
return self.queue.put(self.valreturn)
|
||||
|
||||
if mylar.READ2FILENAME:
|
||||
if mylar.CONFIG.READ2FILENAME:
|
||||
logger.fdebug(module + ' readingorder#: ' + str(arcinfo['ReadingOrder']))
|
||||
if int(arcinfo['ReadingOrder']) < 10: readord = "00" + str(arcinfo['ReadingOrder'])
|
||||
elif int(arcinfo['ReadingOrder']) >= 10 and int(arcinfo['ReadingOrder']) <= 99: readord = "0" + str(arcinfo['ReadingOrder'])
|
||||
|
@ -2414,7 +2419,7 @@ class PostProcessor(object):
|
|||
logger.fdebug(module + ' Destination Path : ' + grab_dst)
|
||||
grab_src = dst
|
||||
logger.fdebug(module + ' Source Path : ' + grab_src)
|
||||
logger.info(module + '[' + mylar.ARC_FILEOPS.upper() + '] ' + str(dst) + ' into directory : ' + str(grab_dst))
|
||||
logger.info(module + '[' + mylar.CONFIG.ARC_FILEOPS.upper() + '] ' + str(dst) + ' into directory : ' + str(grab_dst))
|
||||
|
||||
try:
|
||||
#need to ensure that src is pointing to the series in order to do a soft/hard-link properly
|
||||
|
@ -2423,7 +2428,7 @@ class PostProcessor(object):
|
|||
raise OSError
|
||||
#shutil.copy(grab_src, grab_dst)
|
||||
except (OSError, IOError):
|
||||
logger.fdebug(module + '[' + mylar.ARC_FILEOPS.upper() + '] Failure ' + src + ' - check directories and manually re-run.')
|
||||
logger.fdebug(module + '[' + mylar.CONFIG.ARC_FILEOPS.upper() + '] Failure ' + src + ' - check directories and manually re-run.')
|
||||
return
|
||||
|
||||
#delete entry from nzblog table in case it was forced via the Story Arc Page
|
||||
|
@ -2441,13 +2446,13 @@ class PostProcessor(object):
|
|||
except:
|
||||
pass
|
||||
|
||||
if mylar.WEEKFOLDER or mylar.SEND2READ:
|
||||
#mylar.WEEKFOLDER = will *copy* the post-processed file to the weeklypull list folder for the given week.
|
||||
#mylar.SEND2READ = will add the post-processed file to the readinglits
|
||||
if mylar.CONFIG.WEEKFOLDER or mylar.CONFIG.SEND2READ:
|
||||
#mylar.CONFIG.WEEKFOLDER = will *copy* the post-processed file to the weeklypull list folder for the given week.
|
||||
#mylar.CONFIG.SEND2READ = will add the post-processed file to the readinglits
|
||||
weeklypull.weekly_check(comicid, issuenum, file=(nfilename +ext), path=dst, module=module, issueid=issueid)
|
||||
|
||||
# retrieve/create the corresponding comic objects
|
||||
if mylar.ENABLE_EXTRA_SCRIPTS:
|
||||
if mylar.CONFIG.ENABLE_EXTRA_SCRIPTS:
|
||||
folderp = dst #folder location after move/rename
|
||||
nzbn = self.nzb_name #original nzb name
|
||||
filen = nfilename + ext #new filename
|
||||
|
@ -2517,32 +2522,32 @@ class PostProcessor(object):
|
|||
|
||||
prline2 = 'Mylar has downloaded and post-processed: ' + prline
|
||||
|
||||
if mylar.PROWL_ENABLED:
|
||||
if mylar.CONFIG.PROWL_ENABLED:
|
||||
pushmessage = prline
|
||||
prowl = notifiers.PROWL()
|
||||
prowl.notify(pushmessage, "Download and Postprocessing completed", module=module)
|
||||
|
||||
if mylar.NMA_ENABLED:
|
||||
if mylar.CONFIG.NMA_ENABLED:
|
||||
nma = notifiers.NMA()
|
||||
nma.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
if mylar.PUSHOVER_ENABLED:
|
||||
if mylar.CONFIG.PUSHOVER_ENABLED:
|
||||
pushover = notifiers.PUSHOVER()
|
||||
pushover.notify(prline, prline2, module=module)
|
||||
|
||||
if mylar.BOXCAR_ENABLED:
|
||||
if mylar.CONFIG.BOXCAR_ENABLED:
|
||||
boxcar = notifiers.BOXCAR()
|
||||
boxcar.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
if mylar.PUSHBULLET_ENABLED:
|
||||
if mylar.CONFIG.PUSHBULLET_ENABLED:
|
||||
pushbullet = notifiers.PUSHBULLET()
|
||||
pushbullet.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
if mylar.TELEGRAM_ENABLED:
|
||||
if mylar.CONFIG.TELEGRAM_ENABLED:
|
||||
telegram = notifiers.TELEGRAM()
|
||||
telegram.notify(prline, prline2)
|
||||
|
||||
if mylar.SLACK_ENABLED:
|
||||
if mylar.CONFIG.SLACK_ENABLED:
|
||||
slack = notifiers.SLACK()
|
||||
slack.notify("Download and Postprocessing completed", prline, module=module)
|
||||
|
||||
|
@ -2565,8 +2570,8 @@ class FolderCheck():
|
|||
#junk the queue as it's not needed for folder monitoring, but needed for post-processing to run without error.
|
||||
helpers.job_management(write=True, job='Folder Monitor', current_run=helpers.utctimestamp(), status='Running')
|
||||
mylar.MONITOR_STATUS = 'Running'
|
||||
logger.info(self.module + ' Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
PostProcess = PostProcessor('Manual Run', mylar.CHECK_FOLDER, queue=self.queue)
|
||||
logger.info(self.module + ' Checking folder ' + mylar.CONFIG.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
PostProcess = PostProcessor('Manual Run', mylar.CONFIG.CHECK_FOLDER, queue=self.queue)
|
||||
result = PostProcess.Process()
|
||||
logger.info(self.module + ' Finished checking for newly snatched downloads')
|
||||
helpers.job_management(write=True, job='Folder Monitor', last_run_completed=helpers.utctimestamp(), status='Waiting')
|
||||
|
|
22
mylar/api.py
22
mylar/api.py
|
@ -61,28 +61,28 @@ class Api(object):
|
|||
self.data = self._error_with_message('Missing parameter: cmd')
|
||||
return
|
||||
|
||||
if not mylar.API_ENABLED:
|
||||
if not mylar.CONFIG.API_ENABLED:
|
||||
if kwargs['apikey'] != mylar.DOWNLOAD_APIKEY:
|
||||
self.data = self._error_with_message('API not enabled')
|
||||
return
|
||||
|
||||
if kwargs['apikey'] != mylar.API_KEY and all([kwargs['apikey'] != mylar.DOWNLOAD_APIKEY, mylar.DOWNLOAD_APIKEY != None]):
|
||||
if kwargs['apikey'] != mylar.CONFIG.API_KEY and all([kwargs['apikey'] != mylar.DOWNLOAD_APIKEY, mylar.DOWNLOAD_APIKEY != None]):
|
||||
self.data = self._error_with_message('Incorrect API key')
|
||||
return
|
||||
else:
|
||||
if kwargs['apikey'] == mylar.API_KEY:
|
||||
if kwargs['apikey'] == mylar.CONFIG.API_KEY:
|
||||
self.apitype = 'normal'
|
||||
elif kwargs['apikey'] == mylar.DOWNLOAD_APIKEY:
|
||||
self.apitype = 'download'
|
||||
logger.fdebug('Matched to key. Api set to : ' + self.apitype + ' mode.')
|
||||
self.apikey = kwargs.pop('apikey')
|
||||
|
||||
if not([mylar.API_KEY, mylar.DOWNLOAD_APIKEY]):
|
||||
if not([mylar.CONFIG.API_KEY, mylar.DOWNLOAD_APIKEY]):
|
||||
self.data = self._error_with_message('API key not generated')
|
||||
return
|
||||
|
||||
if self.apitype:
|
||||
if self.apitype == 'normal' and len(mylar.API_KEY) != 32:
|
||||
if self.apitype == 'normal' and len(mylar.CONFIG.API_KEY) != 32:
|
||||
self.data = self._error_with_message('API key not generated correctly')
|
||||
return
|
||||
if self.apitype == 'download' and len(mylar.DOWNLOAD_APIKEY) != 32:
|
||||
|
@ -160,7 +160,7 @@ class Api(object):
|
|||
|
||||
comic = self._dic_from_query('SELECT * from comics WHERE ComicID="' + self.id + '"')
|
||||
issues = self._dic_from_query('SELECT * from issues WHERE ComicID="' + self.id + '"order by Int_IssueNumber DESC')
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
annuals = self._dic_from_query('SELECT * FROM annuals WHERE ComicID="' + self.id + '"')
|
||||
else:
|
||||
annuals = None
|
||||
|
@ -318,7 +318,7 @@ class Api(object):
|
|||
|
||||
def _getVersion(self, **kwargs):
|
||||
self.data = {
|
||||
'git_path': mylar.GIT_PATH,
|
||||
'git_path': mylar.CONFIG.GIT_PATH,
|
||||
'install_type': mylar.INSTALL_TYPE,
|
||||
'current_version': mylar.CURRENT_VERSION,
|
||||
'latest_version': mylar.LATEST_VERSION,
|
||||
|
@ -382,7 +382,7 @@ class Api(object):
|
|||
self.id = kwargs['id']
|
||||
|
||||
img = None
|
||||
image_path = os.path.join(mylar.CACHE_DIR, str(self.id) + '.jpg')
|
||||
image_path = os.path.join(mylar.CONFIG.CACHE_DIR, str(self.id) + '.jpg')
|
||||
|
||||
# Checks if its a valid path and file
|
||||
if os.path.isfile(image_path):
|
||||
|
@ -464,8 +464,8 @@ class Api(object):
|
|||
comiclocation = comic.get('ComicLocation')
|
||||
f = os.path.join(comiclocation, issuelocation)
|
||||
if not os.path.isfile(f):
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation))
|
||||
if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None':
|
||||
pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation))
|
||||
f = os.path.join(pathdir, issuelocation)
|
||||
self.file = f
|
||||
self.filename = issuelocation
|
||||
|
@ -482,7 +482,7 @@ class Api(object):
|
|||
return
|
||||
|
||||
self.nzbname = nzbname
|
||||
f = os.path.join(mylar.CACHE_DIR, nzbname)
|
||||
f = os.path.join(mylar.CONFIG.CACHE_DIR, nzbname)
|
||||
if os.path.isfile(f):
|
||||
self.file = f
|
||||
self.filename = nzbname
|
||||
|
|
|
@ -33,7 +33,7 @@ class info32p(object):
|
|||
self.error = None
|
||||
self.method = None
|
||||
|
||||
lses = self.LoginSession(mylar.USERNAME_32P, mylar.PASSWORD_32P)
|
||||
lses = self.LoginSession(mylar.CONFIG.USERNAME_32P, mylar.CONFIG.PASSWORD_32P)
|
||||
|
||||
if not lses.login():
|
||||
if not self.test:
|
||||
|
@ -68,11 +68,11 @@ class info32p(object):
|
|||
try:
|
||||
with cfscrape.create_scraper() as s:
|
||||
s.headers = self.headers
|
||||
cj = LWPCookieJar(os.path.join(mylar.CACHE_DIR, ".32p_cookies.dat"))
|
||||
cj = LWPCookieJar(os.path.join(mylar.CONFIG.CACHE_DIR, ".32p_cookies.dat"))
|
||||
cj.load()
|
||||
s.cookies = cj
|
||||
|
||||
if mylar.VERIFY_32P == 1 or mylar.VERIFY_32P == True:
|
||||
if mylar.CONFIG.VERIFY_32P == 1 or mylar.CONFIG.VERIFY_32P == True:
|
||||
verify = True
|
||||
else:
|
||||
verify = False
|
||||
|
@ -148,13 +148,14 @@ class info32p(object):
|
|||
|
||||
#set the keys here that will be used to download.
|
||||
try:
|
||||
mylar.PASSKEY_32P = str(self.passkey)
|
||||
mylar.CONFIG.PASSKEY_32P = str(self.passkey)
|
||||
mylar.AUTHKEY_32P = str(self.authkey) # probably not needed here.
|
||||
mylar.KEYS_32P = {}
|
||||
mylar.KEYS_32P = {"user": str(self.uid),
|
||||
"auth": auth,
|
||||
"passkey": str(self.passkey),
|
||||
"authkey": str(self.authkey)}
|
||||
|
||||
except NameError:
|
||||
logger.warn('Unable to retrieve information from 32Pages - either it is not responding/is down or something else is happening that is stopping me.')
|
||||
return
|
||||
|
@ -163,7 +164,7 @@ class info32p(object):
|
|||
return
|
||||
else:
|
||||
mylar.FEEDINFO_32P = feedinfo
|
||||
return feedinfo
|
||||
return
|
||||
|
||||
def searchit(self):
|
||||
#self.searchterm is a tuple containing series name, issue number, volume and publisher.
|
||||
|
@ -187,7 +188,7 @@ class info32p(object):
|
|||
if comic_id:
|
||||
chk_id = helpers.checkthe_id(comic_id)
|
||||
|
||||
if any([not chk_id, mylar.DEEP_SEARCH_32P is True]):
|
||||
if any([not chk_id, mylar.CONFIG.DEEP_SEARCH_32P is True]):
|
||||
#generate the dynamic name of the series here so we can match it up
|
||||
as_d = filechecker.FileChecker()
|
||||
as_dinfo = as_d.dynamic_replace(series_search)
|
||||
|
@ -204,9 +205,11 @@ class info32p(object):
|
|||
if ',' in series_search:
|
||||
series_search = series_search[:series_search.find(',')]
|
||||
|
||||
if not mylar.SEARCH_32P:
|
||||
logger.info('search_32p: %s' % mylar.CONFIG.SEARCH_32P)
|
||||
if mylar.CONFIG.SEARCH_32P is False:
|
||||
url = 'https://walksoftly.itsaninja.party/serieslist.php'
|
||||
params = {'series': re.sub('\|','', mod_series.lower()).strip()} #series_search}
|
||||
logger.info('search query: %s' % re.sub('\|', '', mod_series.lower()).strip())
|
||||
try:
|
||||
t = requests.get(url, params=params, verify=True, headers={'USER-AGENT': mylar.USER_AGENT[:mylar.USER_AGENT.find('/')+7] + mylar.USER_AGENT[mylar.USER_AGENT.find('(')+1]})
|
||||
except requests.exceptions.RequestException as e:
|
||||
|
@ -225,21 +228,22 @@ class info32p(object):
|
|||
except:
|
||||
results = t.text
|
||||
|
||||
logger.info('results: %s' % results)
|
||||
if len(results) == 0:
|
||||
logger.warn('No results found for search on 32P.')
|
||||
return "no results"
|
||||
|
||||
with cfscrape.create_scraper() as s:
|
||||
s.headers = self.headers
|
||||
cj = LWPCookieJar(os.path.join(mylar.CACHE_DIR, ".32p_cookies.dat"))
|
||||
cj = LWPCookieJar(os.path.join(mylar.CONFIG.CACHE_DIR, ".32p_cookies.dat"))
|
||||
cj.load()
|
||||
s.cookies = cj
|
||||
data = []
|
||||
pdata = []
|
||||
pubmatch = False
|
||||
|
||||
if any([not chk_id, mylar.DEEP_SEARCH_32P is True]):
|
||||
if mylar.SEARCH_32P:
|
||||
if any([not chk_id, mylar.CONFIG.DEEP_SEARCH_32P is True]):
|
||||
if mylar.CONFIG.SEARCH_32P is True:
|
||||
url = 'https://32pag.es/torrents.php' #?action=serieslist&filter=' + series_search #&filter=F
|
||||
params = {'action': 'serieslist', 'filter': series_search}
|
||||
time.sleep(1) #just to make sure we don't hammer, 1s pause.
|
||||
|
@ -248,7 +252,7 @@ class info32p(object):
|
|||
results = soup.find_all("a", {"class":"object-qtip"},{"data-type":"torrentgroup"})
|
||||
|
||||
for r in results:
|
||||
if mylar.SEARCH_32P:
|
||||
if mylar.CONFIG.SEARCH_32P is True:
|
||||
torrentid = r['data-id']
|
||||
torrentname = r.findNext(text=True)
|
||||
torrentname = torrentname.strip()
|
||||
|
@ -373,7 +377,7 @@ class info32p(object):
|
|||
except Exception as e:
|
||||
logger.error(self.module + " Can't create session with cfscrape")
|
||||
|
||||
self.session_path = session_path if session_path is not None else os.path.join(mylar.CACHE_DIR, ".32p_cookies.dat")
|
||||
self.session_path = session_path if session_path is not None else os.path.join(mylar.CONFIG.CACHE_DIR, ".32p_cookies.dat")
|
||||
self.ses.cookies = LWPCookieJar(self.session_path)
|
||||
if not os.path.exists(self.session_path):
|
||||
logger.fdebug(self.module + ' Session cookie does not exist. Signing in and Creating.')
|
||||
|
|
|
@ -49,15 +49,15 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
import tempfile
|
||||
logger.info('Filepath: %s' %filepath)
|
||||
logger.info('Filename: %s' %filename)
|
||||
new_folder = tempfile.mkdtemp(prefix='mylar_', dir=mylar.CACHE_DIR) #prefix, suffix, dir
|
||||
new_folder = tempfile.mkdtemp(prefix='mylar_', dir=mylar.CONFIG.CACHE_DIR) #prefix, suffix, dir
|
||||
logger.info('New_Folder: %s' % new_folder)
|
||||
new_filepath = os.path.join(new_folder, filename)
|
||||
logger.info('New_Filepath: %s' % new_filepath)
|
||||
if mylar.FILE_OPTS == 'copy' and manualmeta == False:
|
||||
logger.info('Attempting to copy: %s' % mylar.FILE_OPTS)
|
||||
if mylar.CONFIG.FILE_OPTS == 'copy' and manualmeta == False:
|
||||
logger.info('Attempting to copy: %s' % mylar.CONFIG.FILE_OPTS)
|
||||
shutil.copy(filepath, new_filepath)
|
||||
else:
|
||||
logger.info('Attempting to move: %s' % mylar.FILE_OPTS)
|
||||
logger.info('Attempting to move: %s' % mylar.CONFIG.FILE_OPTS)
|
||||
shutil.move(filepath, new_filepath)
|
||||
filepath = new_filepath
|
||||
except:
|
||||
|
@ -83,14 +83,14 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
##set up default comictagger options here.
|
||||
#used for cbr - to - cbz conversion
|
||||
#depending on copy/move - eitehr we retain the rar or we don't.
|
||||
if mylar.FILE_OPTS == 'move':
|
||||
if mylar.CONFIG.FILE_OPTS == 'move':
|
||||
cbr2cbzoptions = ["-e", "--delete-rar"]
|
||||
else:
|
||||
cbr2cbzoptions = ["-e"]
|
||||
|
||||
tagoptions = ["-s"]
|
||||
if mylar.CMTAG_VOLUME:
|
||||
if mylar.CMTAG_START_YEAR_AS_VOLUME:
|
||||
if mylar.CONFIG.CMTAG_VOLUME:
|
||||
if mylar.CONFIG.CMTAG_START_YEAR_AS_VOLUME:
|
||||
comversion = str(comversion)
|
||||
else:
|
||||
if any([comversion is None, comversion == '', comversion == 'None']):
|
||||
|
@ -114,13 +114,13 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
ctcheck = re.sub("[^0-9]", "", ctversion[:ctend])
|
||||
ctcheck = re.sub('\.', '', ctcheck).strip()
|
||||
if int(ctcheck) >= int('1115'): # (v1.1.15)
|
||||
if mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
if any([mylar.CONFIG.COMICVINE_API == 'None', mylar.CONFIG.COMICVINE_API is None]):
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend] + ' being used - no personal ComicVine API Key supplied. Take your chances.')
|
||||
use_cvapi = "False"
|
||||
else:
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend] + ' being used - using personal ComicVine API key supplied via mylar.')
|
||||
use_cvapi = "True"
|
||||
tagoptions.extend(["--cv-api-key", mylar.COMICVINE_API])
|
||||
tagoptions.extend(["--cv-api-key", mylar.CONFIG.COMICVINE_API])
|
||||
else:
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend+1] + ' being used - personal ComicVine API key not supported in this version. Good luck.')
|
||||
use_cvapi = "False"
|
||||
|
@ -128,12 +128,12 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
i = 1
|
||||
tagcnt = 0
|
||||
|
||||
if mylar.CT_TAG_CR:
|
||||
if mylar.CONFIG.CT_TAG_CR:
|
||||
tagcnt = 1
|
||||
logger.fdebug(module + ' CR Tagging enabled.')
|
||||
|
||||
if mylar.CT_TAG_CBL:
|
||||
if not mylar.CT_TAG_CR: i = 2 #set the tag to start at cbl and end without doing another tagging.
|
||||
if mylar.CONFIG.CT_TAG_CBL:
|
||||
if not mylar.CONFIG.CT_TAG_CR: i = 2 #set the tag to start at cbl and end without doing another tagging.
|
||||
tagcnt = 2
|
||||
logger.fdebug(module + ' CBL Tagging enabled.')
|
||||
|
||||
|
@ -144,7 +144,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
|
||||
#if it's a cbz file - check if no-overwrite existing tags is enabled / disabled in config.
|
||||
if filename.endswith('.cbz'):
|
||||
if mylar.CT_CBZ_OVERWRITE:
|
||||
if mylar.CONFIG.CT_CBZ_OVERWRITE:
|
||||
logger.fdebug(module + ' Will modify existing tag blocks even if it exists.')
|
||||
else:
|
||||
logger.fdebug(module + ' Will NOT modify existing tag blocks even if they exist already.')
|
||||
|
@ -193,9 +193,9 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
script_cmdlog = script_cmd
|
||||
|
||||
else:
|
||||
logger.fdebug(module + ' Enabling ComicTagger script: ' + str(currentScriptName) + ' with options: ' + re.sub(f_tagoptions[f_tagoptions.index(mylar.COMICVINE_API)], 'REDACTED', str(f_tagoptions)))
|
||||
logger.fdebug(module + ' Enabling ComicTagger script: ' + str(currentScriptName) + ' with options: ' + re.sub(f_tagoptions[f_tagoptions.index(mylar.CONFIG.COMICVINE_API)], 'REDACTED', str(f_tagoptions)))
|
||||
# generate a safe command line string to execute the script and provide all the parameters
|
||||
script_cmdlog = re.sub(f_tagoptions[f_tagoptions.index(mylar.COMICVINE_API)], 'REDACTED', str(script_cmd))
|
||||
script_cmdlog = re.sub(f_tagoptions[f_tagoptions.index(mylar.CONFIG.COMICVINE_API)], 'REDACTED', str(script_cmd))
|
||||
|
||||
logger.fdebug(module + ' Executing command: ' +str(script_cmdlog))
|
||||
logger.fdebug(module + ' Absolute path to script: ' +script_cmd[0])
|
||||
|
@ -214,7 +214,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
error_remove = True
|
||||
else:
|
||||
tmpfilename = re.sub('Archive exported successfully to: ', '', out.rstrip())
|
||||
if mylar.FILE_OPTS == 'move':
|
||||
if mylar.CONFIG.FILE_OPTS == 'move':
|
||||
tmpfilename = re.sub('\(Original deleted\)', '', tmpfilename).strip()
|
||||
tmpf = tmpfilename.decode('utf-8')
|
||||
filepath = os.path.join(comicpath, tmpf)
|
||||
|
@ -240,7 +240,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
tidyup(og_filepath, new_filepath, new_folder, manualmeta)
|
||||
return 'corrupt'
|
||||
else:
|
||||
logger.warn(module + '[COMIC-TAGGER][CBR-TO-CBZ] Failed to convert cbr to cbz - check permissions on folder : ' + mylar.CACHE_DIR + ' and/or the location where Mylar is trying to tag the files from.')
|
||||
logger.warn(module + '[COMIC-TAGGER][CBR-TO-CBZ] Failed to convert cbr to cbz - check permissions on folder : ' + mylar.CONFIG.CACHE_DIR + ' and/or the location where Mylar is trying to tag the files from.')
|
||||
tidyup(og_filepath, new_filepath, new_folder, manualmeta)
|
||||
return 'fail'
|
||||
elif 'Cannot find' in out:
|
||||
|
@ -257,11 +257,11 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
logger.info(module + '[COMIC-TAGGER] Successfully wrote ' + tagdisp + ' [' + filepath + ']')
|
||||
i+=1
|
||||
except OSError, e:
|
||||
logger.warn(module + '[COMIC-TAGGER] Unable to run comictagger with the options provided: ' + re.sub(f_tagoptions[f_tagoptions.index(mylar.COMICVINE_API)], 'REDACTED', str(script_cmd)))
|
||||
logger.warn(module + '[COMIC-TAGGER] Unable to run comictagger with the options provided: ' + re.sub(f_tagoptions[f_tagoptions.index(mylar.CONFIG.COMICVINE_API)], 'REDACTED', str(script_cmd)))
|
||||
tidyup(filepath, new_filepath, new_folder, manualmeta)
|
||||
return "fail"
|
||||
|
||||
if mylar.CBR2CBZ_ONLY and initial_ctrun == False:
|
||||
if mylar.CONFIG.CBR2CBZ_ONLY and initial_ctrun == False:
|
||||
break
|
||||
|
||||
return filepath
|
||||
|
@ -269,7 +269,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
|
|||
|
||||
def tidyup(filepath, new_filepath, new_folder, manualmeta):
|
||||
if all([new_filepath is not None, new_folder is not None]):
|
||||
if mylar.FILE_OPTS == 'copy' and manualmeta == False:
|
||||
if mylar.CONFIG.FILE_OPTS == 'copy' and manualmeta == False:
|
||||
if all([os.path.exists(new_folder), os.path.isfile(filepath)]):
|
||||
shutil.rmtree(new_folder)
|
||||
elif os.path.exists(new_filepath) and not os.path.exists(filepath):
|
||||
|
|
|
@ -0,0 +1,855 @@
|
|||
import itertools
|
||||
from collections import OrderedDict
|
||||
from operator import itemgetter
|
||||
|
||||
import os
|
||||
import codecs
|
||||
import shutil
|
||||
import re
|
||||
import ConfigParser
|
||||
import mylar
|
||||
from mylar import logger, helpers
|
||||
|
||||
config = ConfigParser.SafeConfigParser()
|
||||
|
||||
_CONFIG_DEFINITIONS = OrderedDict({
|
||||
#keyname, type, section, default
|
||||
'CONFIG_VERSION': (int, 'General', 6),
|
||||
'MINIMAL_INI': (bool, 'General', False),
|
||||
'OLDCONFIG_VERSION': (str, 'General', None),
|
||||
'AUTO_UPDATE': (bool, 'General', False),
|
||||
'CACHE_DIR': (str, 'General', None),
|
||||
'DYNAMIC_UPDATE': (int, 'General', 0),
|
||||
'REFRESH_CACHE': (int, 'General', 7),
|
||||
'ANNUALS_ON': (bool, 'General', False),
|
||||
'SYNO_FIX': (bool, 'General', False),
|
||||
'LAUNCH_BROWSER' : (bool, 'General', False),
|
||||
'WANTED_TAB_OFF': (bool, 'General', False),
|
||||
'ENABLE_RSS': (bool, 'General', False),
|
||||
'SEARCH_DELAY' : (int, 'General', 1),
|
||||
'GRABBAG_DIR': (str, 'General', None),
|
||||
'HIGHCOUNT': (int, 'General', 0),
|
||||
'MAINTAINSERIESFOLDER': (bool, 'General', False),
|
||||
'DESTINATION_DIR': (str, 'General', None), #if M_D_D_ is enabled, this will be the DEFAULT for writing
|
||||
'MULTIPLE_DEST_DIRS': (str, 'General', None), #Nothing will ever get written to these dirs - just for scanning, unless it's metatagging/renaming.
|
||||
'CREATE_FOLDERS': (bool, 'General', True),
|
||||
'DELETE_REMOVE_DIR': (bool, 'General', False),
|
||||
'UPCOMING_SNATCHED': (bool, 'General', True),
|
||||
'UPDATE_ENDED': (bool, 'General', False),
|
||||
'LOCMOVE': (bool, 'General', False),
|
||||
'NEWCOM_DIR': (str, 'General', None),
|
||||
'FFTONEWCOM_DIR': (bool, 'General', False),
|
||||
'FOLDER_SCAN_LOG_VERBOSE': (bool, 'General', False),
|
||||
'INTERFACE': (str, 'General', None),
|
||||
'CORRECT_METADATA': (bool, 'General', False),
|
||||
'MOVE_FILES': (bool, 'General', False),
|
||||
'RENAME_FILES': (bool, 'General', False),
|
||||
'FOLDER_FORMAT': (str, 'General', None),
|
||||
'FILE_FORMAT': (str, 'General', None),
|
||||
'REPLACE_SPACES': (bool, 'General', False),
|
||||
'REPLACE_CHAR': (str, 'General', None),
|
||||
'ZERO_LEVEL': (bool, 'General', False),
|
||||
'ZERO_LEVEL_N': (str, 'General', None),
|
||||
'LOWERCASE_FILENAMES': (bool, 'General', False),
|
||||
'IGNORE_HAVETOTAL': (bool, 'General', False),
|
||||
'SNATCHED_HAVETOTAL': (bool, 'General', False),
|
||||
'FAILED_DOWNLOAD_HANDLING': (bool, 'General', False),
|
||||
'FAILED_AUTO': (bool, 'General',False),
|
||||
'PREFERRED_QUALITY': (int, 'General', 0),
|
||||
'USE_MINSIZE': (bool, 'General', False),
|
||||
'MINSIZE': (str, 'General', None),
|
||||
'USE_MAXSIZE': (bool, 'General', False),
|
||||
'MAXSIZE': (str, 'General', None),
|
||||
'AUTOWANT_UPCOMING': (bool, 'General', True),
|
||||
'AUTOWANT_ALL': (bool, 'General', False),
|
||||
'COMIC_COVER_LOCAL': (bool, 'General', False),
|
||||
'ADD_TO_CSV': (bool, 'General', True),
|
||||
'SKIPPED2WANTED': (bool, 'General', False),
|
||||
'READ2FILENAME': (bool, 'General', False),
|
||||
'SEND2READ': (bool, 'General', False),
|
||||
'NZB_STARTUP_SEARCH': (bool, 'General', False),
|
||||
'UNICODE_ISSUENUMBER': (bool, 'General', False),
|
||||
|
||||
'RSS_CHECKINTERVAL': (int, 'Scheduler', 20),
|
||||
'SEARCH_INTERVAL': (int, 'Scheduler', 360),
|
||||
'DOWNLOAD_SCAN_INTERVAL': (int, 'Scheduler', 5),
|
||||
'CHECK_GITHUB_INTERVAL' : (int, 'Scheduler', 360),
|
||||
|
||||
'ALT_PULL' : (int, 'Weekly', 2),
|
||||
'PULL_REFRESH': (str, 'Weekly', None),
|
||||
'WEEKFOLDER': (bool, 'Weekly', False),
|
||||
'WEEKFOLDER_LOC': (str, 'Weekly', None),
|
||||
'WEEKFOLDER_FORMAT': (int, 'Weekly', 0),
|
||||
'INDIE_PUB': (int, 'Weekly', 75),
|
||||
'BIGGIE_PUB': (int, 'Weekly', 55),
|
||||
|
||||
'HTTP_PORT' : (int, 'Interface', 8090),
|
||||
'HTTP_HOST' : (str, 'Interface', None),
|
||||
'HTTP_USERNAME' : (str, 'Interface', None),
|
||||
'HTTP_PASSWORD' : (str, 'Interface', None),
|
||||
'HTTP_ROOT' : (str, 'Interface', None),
|
||||
'ENABLE_HTTPS' : (bool, 'Interface', False),
|
||||
'HTTPS_CERT' : (str, 'Interface', None),
|
||||
'HTTPS_KEY' : (str, 'Interface', None),
|
||||
'HTTPS_CHAIN' : (str, 'Interface', None),
|
||||
'HTTPS_FORCE_ON' : (bool, 'Interface', False),
|
||||
'HOST_RETURN' : (str, 'Interface', None),
|
||||
|
||||
'API_ENABLED' : (bool, 'API', False),
|
||||
'API_KEY' : (str, 'API', None),
|
||||
|
||||
'CVAPI_RATE' : (int, 'CV', 2),
|
||||
'COMICVINE_API': (str, 'CV', None),
|
||||
'BLACKLISTED_PUBLISHERS' : (str, 'CV', None),
|
||||
'CV_VERIFY': (bool, 'CV', True),
|
||||
'CV_ONLY': (bool, 'CV', False),
|
||||
'CV_ONETIMER': (bool, 'CV', True),
|
||||
'CVINFO': (bool, 'CV', False),
|
||||
|
||||
'LOG_DIR' : (str, 'Logs', None),
|
||||
'MAX_LOGSIZE' : (int, 'Logs', 10000000),
|
||||
'LOG_LEVEL': (int, 'Logs', 0),
|
||||
|
||||
'GIT_PATH' : (str, 'Git', None),
|
||||
'GIT_USER' : (str, 'Git', 'evilhero'),
|
||||
'GIT_BRANCH' : (str, 'Git', None),
|
||||
'CHECK_GITHUB' : (bool, 'Git', False),
|
||||
'CHECK_GITHUB_ON_STARTUP' : (bool, 'Git', False),
|
||||
|
||||
'ENFORCE_PERMS': (bool, 'Perms', True),
|
||||
'CHMOD_DIR': (str, 'Perms', 0777),
|
||||
'CHMOD_FILE': (str, 'Perms', 0660),
|
||||
'CHOWNER': (str, 'Perms', None),
|
||||
'CHGROUP': (str, 'Perms', None),
|
||||
|
||||
'ADD_COMICS': (bool, 'Import', False),
|
||||
'COMIC_DIR': (str, 'Import', None),
|
||||
'IMP_MOVE': (bool, 'Import', False),
|
||||
'IMP_RENAME': (bool, 'Import', False),
|
||||
'IMP_METADATA': (bool, 'Import', False), # should default to False - this is enabled for testing only.
|
||||
|
||||
'DUPECONSTRAINT': (str, 'Duplicates', None),
|
||||
'DDUMP': (bool, 'Duplicates', False),
|
||||
'DUPLICATE_DUMP': (str, 'Duplicates', None),
|
||||
|
||||
'PROWL_ENABLED': (bool, 'Prowl', False),
|
||||
'PROWL_PRIORITY': (int, 'Prowl', 0),
|
||||
'PROWL_KEYS': (str, 'Prowl', None),
|
||||
'PROWL_ONSNATCH': (bool, 'Prowl', False),
|
||||
|
||||
'NMA_ENABLED': (bool, 'NMA', False),
|
||||
'NMA_APIKEY': (str, 'NMA', None),
|
||||
'NMA_PRIORITY': (int, 'NMA', 0),
|
||||
'NMA_ONSNATCH': (bool, 'NMA', False),
|
||||
|
||||
'PUSHOVER_ENABLED': (bool, 'PUSHOVER', False),
|
||||
'PUSHOVER_PRIORITY': (int, 'PUSHOVER', 0),
|
||||
'PUSHOVER_APIKEY': (str, 'PUSHOVER', None),
|
||||
'PUSHOVER_USERKEY': (str, 'PUSHOVER', None),
|
||||
'PUSHOVER_ONSNATCH': (bool, 'PUSHOVER', False),
|
||||
|
||||
'BOXCAR_ENABLED': (bool, 'BOXCAR', False),
|
||||
'BOXCAR_ONSNATCH': (bool, 'BOXCAR', False),
|
||||
'BOXCAR_TOKEN': (str, 'BOXCAR', None),
|
||||
|
||||
'PUSHBULLET_ENABLED': (bool, 'PUSHBULLET', False),
|
||||
'PUSHBULLET_APIKEY': (str, 'PUSHBULLET', None),
|
||||
'PUSHBULLET_DEVICEID': (str, 'PUSHBULLET', None),
|
||||
'PUSHBULLET_CHANNEL_TAG': (str, 'PUSHBULLET', None),
|
||||
'PUSHBULLET_ONSNATCH': (bool, 'PUSHBULLET', False),
|
||||
|
||||
'TELEGRAM_ENABLED': (bool, 'TELEGRAM', False),
|
||||
'TELEGRAM_TOKEN': (str, 'TELEGRAM', None),
|
||||
'TELEGRAM_USERID': (str, 'TELEGRAM', None),
|
||||
'TELEGRAM_ONSNATCH': (bool, 'TELEGRAM', False),
|
||||
|
||||
'SLACK_ENABLED': (bool, 'SLACK', False),
|
||||
'SLACK_WEBHOOK_URL': (str, 'SLACK', None),
|
||||
'SLACK_ONSNATCH': (bool, 'SLACK', False),
|
||||
|
||||
'POST_PROCESSING': (bool, 'PostProcess', False),
|
||||
'FILE_OPTS': (str, 'PostProcess', 'move'),
|
||||
'SNATCHEDTORRENT_NOTIFY': (bool, 'PostProcess', False),
|
||||
'LOCAL_TORRENT_PP': (bool, 'PostProcess', False),
|
||||
'POST_PROCESSING_SCRIPT': (str, 'PostProcess', None),
|
||||
'ENABLE_EXTRA_SCRIPTS': (bool, 'PostProcess', False),
|
||||
'EXTRA_SCRIPTS': (str, 'PostProcess', None),
|
||||
'ENABLE_SNATCH_SCRIPT': (bool, 'PostProcess', False),
|
||||
'SNATCH_SCRIPT': (str, 'PostProcess', None),
|
||||
'ENABLE_PRE_SCRIPTS': (bool, 'PostProcess', False),
|
||||
'PRE_SCRIPTS': (str, 'PostProcess', None),
|
||||
'ENABLE_CHECK_FOLDER': (bool, 'PostProcess', False),
|
||||
'CHECK_FOLDER': (str, 'PostProcess', None),
|
||||
|
||||
'PROVIDER_ORDER': (str, 'Providers', None),
|
||||
'USENET_RETENTION': (int, 'Providers', 1500),
|
||||
|
||||
'NZB_DOWNLOADER': (int, 'Client', 0), #0': sabnzbd, #1': nzbget, #2': blackhole
|
||||
'TORRENT_DOWNLOADER': (int, 'Client', 0), #0': watchfolder, #1': uTorrent, #2': rTorrent, #3': transmission, #4': deluge, #5': qbittorrent
|
||||
|
||||
'SAB_HOST': (str, 'SABnzbd', None),
|
||||
'SAB_USERNAME': (str, 'SABnzbd', None),
|
||||
'SAB_PASSWORD': (str, 'SABnzbd', None),
|
||||
'SAB_APIKEY': (str, 'SABnzbd', None),
|
||||
'SAB_CATEGORY': (str, 'SABnzbd', None),
|
||||
'SAB_PRIORITY': (str, 'SABnzbd', None),
|
||||
'SAB_TO_MYLAR': (bool, 'SABnzbd', False),
|
||||
'SAB_DIRECTORY': (str, 'SABnzbd', None),
|
||||
|
||||
'NZBGET_HOST': (str, 'NZBGet', None),
|
||||
'NZBGET_PORT': (str, 'NZBGet', None),
|
||||
'NZBGET_USERNAME': (str, 'NZBGet', None),
|
||||
'NZBGET_PASSWORD': (str, 'NZBGet', None),
|
||||
'NZBGET_PRIORITY': (str, 'NZBGet', None),
|
||||
'NZBGET_CATEGORY': (str, 'NZBGet', None),
|
||||
'NZBGET_DIRECTORY': (str, 'NZBGet', None),
|
||||
|
||||
'BLACKHOLE_DIR': (str, 'Blackhole', None),
|
||||
|
||||
'ENABLE_TPSE': (bool, 'TPSE', False),
|
||||
'TPSE_PROXY': (str, 'TPSE', None),
|
||||
'TPSE_VERIFY': (bool, 'TPSE', True),
|
||||
|
||||
'NZBSU': (bool, 'NZBsu', False),
|
||||
'NZBSU_UID': (str, 'NZBsu', None),
|
||||
'NZBSU_APIKEY': (str, 'NZBsu', None),
|
||||
'NZBSU_VERIFY': (bool, 'NZBsu', True),
|
||||
|
||||
'DOGNZB': (bool, 'DOGnzb', False),
|
||||
'DOGNZB_APIKEY': (str, 'DOGnzb', None),
|
||||
'DOGNZB_VERIFY': (bool, 'DOGnzb', True),
|
||||
|
||||
'NEWZNAB': (bool, 'Newznab', False),
|
||||
'EXTRA_NEWZNABS': (str, 'Newznab', None),
|
||||
|
||||
'ENABLE_TORZNAB': (bool, 'Torznab', False),
|
||||
'TORZNAB_NAME': (str, 'Torznab', None),
|
||||
'TORZNAB_HOST': (str, 'Torznab', None),
|
||||
'TORZNAB_APIKEY': (str, 'Torznab', None),
|
||||
'TORZNAB_CATEGORY': (str, 'Torznab', None),
|
||||
'TORZNAB_VERIFY': (bool, 'Torznab', False),
|
||||
|
||||
'EXPERIMENTAL': (bool, 'Experimental', False),
|
||||
'ALTEXPERIMENTAL': (bool, 'Experimental', False),
|
||||
|
||||
'TAB_ENABLE': (bool, 'Tablet', False),
|
||||
'TAB_HOST': (str, 'Tablet', None),
|
||||
'TAB_USER': (str, 'Tablet', None),
|
||||
'TAB_PASS': (str, 'Tablet', None),
|
||||
'TAB_DIRECTORY': (str, 'Tablet', None),
|
||||
|
||||
'STORYARCDIR': (bool, 'StoryArc', False),
|
||||
'COPY2ARCDIR': (bool, 'StoryArc', False),
|
||||
'ARC_FOLDERFORMAT': (str, 'StoryArc', None),
|
||||
'ARC_FILEOPS': (str, 'StoryArc', 'copy'),
|
||||
|
||||
'LOCMOVE': (bool, 'Update', False),
|
||||
'NEWCOM_DIR': (str, 'Update', None),
|
||||
'FFTONEWCOM_DIR': (bool, 'Update', False),
|
||||
|
||||
'ENABLE_META': (bool, 'Metatagging', False),
|
||||
'CMTAGGER_PATH': (str, 'Metatagging', None),
|
||||
'CBR2CBZ_ONLY': (bool, 'Metatagging', False),
|
||||
'CT_TAG_CR': (bool, 'Metatagging', True),
|
||||
'CT_TAG_CBL': (bool, 'Metatagging', True),
|
||||
'CT_CBZ_OVERWRITE': (bool, 'Metatagging', False),
|
||||
'UNRAR_CMD': (str, 'Metatagging', None),
|
||||
'CT_SETTINGSPATH': (str, 'Metatagging', None),
|
||||
'CMTAG_VOLUME': (bool, 'Metatagging', True),
|
||||
'CMTAG_START_YEAR_AS_VOLUME': (bool, 'Metatagging', False),
|
||||
'SETDEFAULTVOLUME': (bool, 'Metatagging', False),
|
||||
|
||||
'ENABLE_TORRENTS': (bool, 'Torrents', False),
|
||||
'ENABLE_TORRENT_SEARCH': (bool, 'Torrents', False),
|
||||
'MINSEEDS': (int, 'Torrents', 0),
|
||||
'AUTO_SNATCH': (bool, 'Torrents', False),
|
||||
'AUTO_SNATCH_SCRIPT': (str, 'Torrents', None),
|
||||
'ALLOW_PACKS': (bool, 'Torrents', False),
|
||||
|
||||
'TORRENT_LOCAL': (bool, 'Watchdir', False),
|
||||
'LOCAL_WATCHDIR': (str, 'Watchdir', None),
|
||||
'TORRENT_SEEDBOX': (bool, 'Seedbox', False),
|
||||
'SEEDBOX_HOST': (str, 'Seedbox', None),
|
||||
'SEEDBOX_PORT': (str, 'Seedbox', None),
|
||||
'SEEDBOX_USER': (str, 'Seedbox', None),
|
||||
'SEEDBOX_PASS': (str, 'Seedbox', None),
|
||||
'SEEDBOX_WATCHDIR': (str, 'Seedbox', None),
|
||||
|
||||
'ENABLE_32P': (bool, '32P', False),
|
||||
'SEARCH_32P': (bool, '32P', False), #0': use WS to grab torrent groupings, #1': use 32P to grab torrent groupings
|
||||
'DEEP_SEARCH_32P': (bool, '32P', False), #0': do not take multiple search series results & use ref32p if available, #1= search each search series result for valid $
|
||||
'MODE_32P': (bool, '32P', False), #0': legacymode, #1': authmode
|
||||
'RSSFEED_32P': (str, '32P', None),
|
||||
'PASSKEY_32P': (str, '32P', None),
|
||||
'USERNAME_32P': (str, '32P', None),
|
||||
'PASSWORD_32P': (str, '32P', None),
|
||||
'VERIFY_32P': (bool, '32P', True),
|
||||
|
||||
'RTORRENT_HOST': (str, 'Rtorrent', None),
|
||||
'RTORRENT_AUTHENTICATION': (str, 'Rtorrent', 'basic'),
|
||||
'RTORRENT_RPC_URL': (str, 'Rtorrent', None),
|
||||
'RTORRENT_SSL': (bool, 'Rtorrent', False),
|
||||
'RTORRENT_VERIFY': (bool, 'Rtorrent', False),
|
||||
'RTORRENT_CA_BUNDLE': (str, 'Rtorrent', None),
|
||||
'RTORRENT_USERNAME': (str, 'Rtorrent', None),
|
||||
'RTORRENT_PASSWORD': (str, 'Rtorrent', None),
|
||||
'RTORRENT_STARTONLOAD': (bool, 'Rtorrent', False),
|
||||
'RTORRENT_LABEL': (str, 'Rtorrent', None),
|
||||
'RTORRENT_DIRECTORY': (str, 'Rtorrent', None),
|
||||
|
||||
'UTORRENT_HOST': (str, 'uTorrent', None),
|
||||
'UTORRENT_USERNAME': (str, 'uTorrent', None),
|
||||
'UTORRENT_PASSWORD': (str, 'uTorrent', None),
|
||||
'UTORRENT_LABEL': (str, 'uTorrent', None),
|
||||
|
||||
'TRANSMISSION_HOST': (str, 'Transmission', None),
|
||||
'TRANSMISSION_USERNAME': (str, 'Transmission', None),
|
||||
'TRANSMISSION_PASSWORD': (str, 'Transmission', None),
|
||||
'TRANSMISSION_DIRECTORY': (str, 'Transmission', None),
|
||||
|
||||
'DELUGE_HOST': (str, 'Deluge', None),
|
||||
'DELUGE_USERNAME': (str, 'Deluge', None),
|
||||
'DELUGE_PASSWORD': (str, 'Deluge', None),
|
||||
'DELUGE_LABEL': (str, 'Deluge', None),
|
||||
|
||||
'QBITTORRENT_HOST': (str, 'qBittorrent', None),
|
||||
'QBITTORRENT_USERNAME': (str, 'qBittorrent', None),
|
||||
'QBITTORRENT_PASSWORD': (str, 'qBittorrent', None),
|
||||
'QBITTORRENT_LABEL': (str, 'qBittorrent', None),
|
||||
'QBITTORRENT_FOLDER': (str, 'qBittorrent', None),
|
||||
'QBITTORRENT_STARTONLOAD': (bool, 'qBittorrent', False),
|
||||
|
||||
})
|
||||
|
||||
class Config(object):
|
||||
|
||||
def __init__(self, config_file):
|
||||
# initalize the config...
|
||||
self._config_file = config_file
|
||||
|
||||
def config_vals(self, update=False):
|
||||
if update is False:
|
||||
self.config = config.readfp(codecs.open(self._config_file, 'r', 'utf8')) #read(self._config_file)
|
||||
#check for empty config / new config
|
||||
count = sum(1 for line in open(self._config_file))
|
||||
self.newconfig = 7
|
||||
if count == 0:
|
||||
CONFIG_VERSION = 0
|
||||
MINIMALINI = False
|
||||
else:
|
||||
# get the config version first, since we need to know.
|
||||
try:
|
||||
CONFIG_VERSION = config.getint('General', 'config_version')
|
||||
except:
|
||||
CONFIG_VERSION = 0
|
||||
try:
|
||||
MINIMALINI = config.getboolean('General', 'minimal_ini')
|
||||
except:
|
||||
MINIMALINI = False
|
||||
|
||||
setattr(self, 'CONFIG_VERSION', CONFIG_VERSION)
|
||||
setattr(self, 'MINIMAL_INI', MINIMALINI)
|
||||
|
||||
config_values = []
|
||||
for k,v in _CONFIG_DEFINITIONS.iteritems():
|
||||
xv = []
|
||||
xv.append(k)
|
||||
for x in v:
|
||||
if x is None:
|
||||
x = 'None'
|
||||
xv.append(x)
|
||||
value = self.check_setting(xv)
|
||||
|
||||
if all([k != 'CONFIG_VERSION', k != 'MINIMAL_INI']):
|
||||
try:
|
||||
if v[0] == str and any([value == "", value is None, len(value) == 0, value == 'None']):
|
||||
value = v[2]
|
||||
except:
|
||||
value = v[2]
|
||||
|
||||
try:
|
||||
if v[0] == bool:
|
||||
value = self.argToBool(value)
|
||||
except:
|
||||
value = self.argToBool(v[2])
|
||||
try:
|
||||
if all([v[0] == int, str(value).isdigit()]):
|
||||
value = int(value)
|
||||
except:
|
||||
value = v[2]
|
||||
|
||||
setattr(self, k, value)
|
||||
|
||||
#just to ensure defaults are properly set...
|
||||
if any([value is None, value == 'None']):
|
||||
value = v[0](v[2])
|
||||
|
||||
|
||||
if all([self.MINIMAL_INI is True, str(value) != str(v[2])]) or self.MINIMAL_INI is False:
|
||||
try:
|
||||
config.add_section(v[1])
|
||||
except ConfigParser.DuplicateSectionError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
if config.has_section(v[1]):
|
||||
config.remove_option(v[1], k.lower())
|
||||
except ConfigParser.NoSectionError:
|
||||
continue
|
||||
|
||||
if all([config.has_section(v[1]), self.MINIMAL_INI is False]) or all([self.MINIMAL_INI is True, str(value) != str(v[2]), config.has_section(v[1])]):
|
||||
config.set(v[1], k.lower(), str(value))
|
||||
else:
|
||||
try:
|
||||
if config.has_section(v[1]):
|
||||
config.remove_option(v[1], k.lower())
|
||||
if len(dict(config.items(v[1]))) == 0:
|
||||
config.remove_section(v[1])
|
||||
except ConfigParser.NoSectionError:
|
||||
continue
|
||||
else:
|
||||
if k == 'CONFIG_VERSION':
|
||||
config.remove_option('General', 'dbuser')
|
||||
config.remove_option('General', 'dbpass')
|
||||
config.remove_option('General', 'dbchoice')
|
||||
config.remove_option('General', 'dbname')
|
||||
elif k == 'MINIMAL_INI':
|
||||
config.set(v[1], k.lower(), str(self.MINIMAL_INI))
|
||||
|
||||
def read(self):
|
||||
self.config_vals()
|
||||
setattr(self, 'EXTRA_NEWZNABS', self.get_extra_newznabs())
|
||||
if any([self.CONFIG_VERSION == 0, self.CONFIG_VERSION < self.newconfig]):
|
||||
try:
|
||||
shutil.move(self._config_file, os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
|
||||
except:
|
||||
logger.warn('Unable to make proper backup of config file in %s' % os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
|
||||
setattr(self, 'CONFIG_VERSION', str(self.newconfig))
|
||||
config.set('General', 'CONFIG_VERSION', str(self.newconfig))
|
||||
print('Updating config to newest version : %s' % self.newconfig)
|
||||
self.writeconfig()
|
||||
else:
|
||||
self.provider_sequence()
|
||||
self.configure()
|
||||
return self
|
||||
|
||||
def check_section(self, section, key):
|
||||
""" Check if INI section exists, if not create it """
|
||||
if config.has_section(section):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def argToBool(self, argument):
|
||||
_arg = argument.strip().lower() if isinstance(argument, basestring) else argument
|
||||
if _arg in (1, '1', 'on', 'true', True):
|
||||
return True
|
||||
elif _arg in (0, '0', 'off', 'false', False):
|
||||
return False
|
||||
return argument
|
||||
|
||||
def check_setting(self, key):
|
||||
""" Cast any value in the config to the right type or use the default """
|
||||
keyname = key[0].upper()
|
||||
inikey = key[0].lower()
|
||||
definition_type = key[1]
|
||||
section = key[2]
|
||||
default = key[3]
|
||||
myval = self.check_config(definition_type, section, inikey, default)
|
||||
if myval['status'] is False:
|
||||
if self.CONFIG_VERSION == 6:
|
||||
chkstatus = False
|
||||
if config.has_section('Torrents'):
|
||||
myval = self.check_config(definition_type, 'Torrents', inikey, default)
|
||||
if myval['status'] is True:
|
||||
chkstatus = True
|
||||
try:
|
||||
config.remove_option('Torrents', inikey)
|
||||
except ConfigParser.NoSectionError:
|
||||
pass
|
||||
if all([chkstatus is False, config.has_section('General')]):
|
||||
myval = self.check_config(definition_type, 'General', inikey, default)
|
||||
if myval['status'] is True:
|
||||
config.remove_option('General', inikey)
|
||||
|
||||
else:
|
||||
#print 'no key found in ini - setting to default value of %s' % definition_type(default)
|
||||
#myval = {'value': definition_type(default)}
|
||||
pass
|
||||
else:
|
||||
myval = {'value': definition_type(default)}
|
||||
#if all([myval['value'] is not None, myval['value'] != '', myval['value'] != 'None']):
|
||||
#if default != myval['value']:
|
||||
# print '%s : %s' % (keyname, myval['value'])
|
||||
#else:
|
||||
# print 'NEW CONFIGURATION SETTING %s : %s' % (keyname, myval['value'])
|
||||
return myval['value']
|
||||
|
||||
def check_config(self, definition_type, section, inikey, default):
|
||||
try:
|
||||
if definition_type == str:
|
||||
myval = {'status': True, 'value': config.get(section, inikey)}
|
||||
elif definition_type == int:
|
||||
myval = {'status': True, 'value': config.getint(section, inikey)}
|
||||
elif definition_type == bool:
|
||||
myval = {'status': True, 'value': config.getboolean(section, inikey)}
|
||||
except Exception:
|
||||
myval = {'status': False, 'value': None}
|
||||
return myval
|
||||
|
||||
def _define(self, name):
|
||||
key = name.upper()
|
||||
ini_key = name.lower()
|
||||
definition = _CONFIG_DEFINITIONS[key]
|
||||
if len(definition) == 3:
|
||||
definition_type, section, default = definition
|
||||
elif len(definition) == 4:
|
||||
definition_type, section, _, default = definition
|
||||
return key, definition_type, section, ini_key, default
|
||||
|
||||
|
||||
def process_kwargs(self, kwargs):
    """
    Given a big bunch of key value pairs, apply them to the ini.

    Each value is coerced to its declared definition type (str/bool/int),
    written onto self as an attribute, and written to (or removed from)
    the module-level ConfigParser 'config'. When MINIMAL_INI is enabled,
    options that still equal their default are stripped from the ini so
    only user-changed values are persisted.
    NOTE(review): newznab/Torznab kwargs are deliberately skipped here —
    presumably handled by their own dedicated code path; confirm against caller.
    """
    for name, value in kwargs.items():
        # skip per-provider newznab ('newznab<N>') and Torznab form fields
        if not any([(name.startswith('newznab') and name[-1].isdigit()), name.startswith('Torznab')]):
            key, definition_type, section, ini_key, default = self._define(name)
            # --- coerce to string / fall back to default ---
            try:
                # empty submissions of string options revert to the default
                if any([value == "", value is None, len(value) == 0]) and definition_type == str:
                    value = default
                else:
                    value = str(value)
            except:
                # len(None) etc. raises TypeError -> treat as unset
                value = default
            # --- bool coercion ---
            try:
                if definition_type == bool:
                    value = self.argToBool(value)
            except:
                value = self.argToBool(default)
            # --- int coercion (only when the text is purely digits) ---
            try:
                if all([definition_type == int, str(value).isdigit()]):
                    value = int(value)
            except:
                value = default

            #just to ensure defaults are properly set...
            if any([value is None, value == 'None']):
                value = definition_type(default)

            if key != 'MINIMAL_INI':
                # live attribute on the config object ('None' text maps to None)
                if value == 'None': nv = None
                else: nv = definition_type(value)
                setattr(self, key, nv)

                # persist only non-default values when MINIMAL_INI is on;
                # persist everything when it's off
                if all([self.MINIMAL_INI is True, definition_type(value) != definition_type(default)]) or self.MINIMAL_INI is False:
                    try:
                        config.add_section(section)
                    except ConfigParser.DuplicateSectionError:
                        # section already present - fine
                        pass
                else:
                    # value equals default under MINIMAL_INI: drop it from the
                    # ini, and drop the section too once it's empty
                    try:
                        if config.has_section(section):
                            config.remove_option(section, ini_key)
                            if len(dict(config.items(section))) == 0:
                                config.remove_section(section)
                    except ConfigParser.NoSectionError:
                        continue

                if any([value is None, value == ""]):
                    value = definition_type(default)
                if config.has_section(section) and (all([self.MINIMAL_INI is True, definition_type(value) != definition_type(default)]) or self.MINIMAL_INI is False):
                    config.set(section, ini_key, str(value))

            else:
                # MINIMAL_INI itself is always written so the mode survives restarts
                config.set(section, ini_key, str(self.MINIMAL_INI))

        else:
            # newznab/Torznab entries: handled elsewhere
            pass
def writeconfig(self):
    """Flush the live configuration out to the ini file on disk.

    Regenerates the provider ordering first so a save never requires a
    restart, then serializes the extra newznabs and dynamic_update flag
    before writing the whole ConfigParser state out as UTF-8.
    """
    logger.fdebug("Writing configuration to file")
    # recompute provider ordering on every save
    self.provider_sequence()
    flattened = ', '.join(self.write_extra_newznabs())
    config.set('Newznab', 'extra_newznabs', flattened)
    config.set('General', 'dynamic_update', str(self.DYNAMIC_UPDATE))
    try:
        with codecs.open(self._config_file, encoding='utf8', mode='w+') as fh:
            config.write(fh)
    except IOError as e:
        logger.warn("Error writing configuration file: %s", e)
def configure(self, update=False):
    """Validate and normalize configuration values after load/save.

    Fixes up URLs and directories, clamps scheduler intervals to safe
    minimums, and mirrors downloader selections into the module-level
    mylar.USE_* flags. When update=True (re-configure after a settings
    save) the informational fdebug lines are suppressed.
    """
    # ensure the SABnzbd host always carries a scheme
    # NOTE(review): this crashes if SAB_HOST is None ([:7] on None) despite the
    # 'is not None' element — all() doesn't short-circuit the slice; confirm
    # SAB_HOST is always at least '' by this point.
    if all(['http://' not in self.SAB_HOST[:7], 'https://' not in self.SAB_HOST[:8], self.SAB_HOST != '', self.SAB_HOST is not None]):
        self.SAB_HOST = 'http://' + self.SAB_HOST

    if not update:
        logger.fdebug('Log dir: %s' % self.LOG_DIR)

    if self.LOG_DIR is None:
        self.LOG_DIR = os.path.join(mylar.DATA_DIR, 'logs')

    if not os.path.exists(self.LOG_DIR):
        try:
            os.makedirs(self.LOG_DIR)
        except OSError:
            # NOTE(review): bare QUIET — presumably should be mylar.QUIET; verify.
            if not QUIET:
                logger.warn('Unable to create the log directory. Logging to screen only.')

    if not update:
        logger.fdebug('[Cache Check] Cache directory currently set to : ' + self.CACHE_DIR)

    # Put the cache dir in the data dir for now
    if not self.CACHE_DIR:
        self.CACHE_DIR = os.path.join(str(mylar.DATA_DIR), 'cache')
        if not update:
            logger.fdebug('[Cache Check] Cache directory not found in configuration. Defaulting location to : ' + self.CACHE_DIR)

    if not os.path.exists(self.CACHE_DIR):
        try:
            os.makedirs(self.CACHE_DIR)
        except OSError:
            logger.error('[Cache Check] Could not create cache dir. Check permissions of datadir: ' + mylar.DATA_DIR)

    ## Sanity checking
    if any([self.COMICVINE_API is None, self.COMICVINE_API == 'None', self.COMICVINE_API == '']):
        logger.error('No User Comicvine API key specified. I will not work very well due to api limits - http://api.comicvine.com/ and get your own free key.')
        mylar.CONFIG.COMICVINE_API = None

    # clamp scheduler intervals to safe minimums
    if self.SEARCH_INTERVAL < 360:
        logger.fdebug('Search interval too low. Resetting to 6 hour minimum')
        self.SEARCH_INTERVAL = 360

    if self.SEARCH_DELAY < 1:
        logger.fdebug("Minimum search delay set for 1 minute to avoid hammering.")
        self.SEARCH_DELAY = 1

    if self.RSS_CHECKINTERVAL < 20:
        logger.fdebug("Minimum RSS Interval Check delay set for 20 minutes to avoid hammering.")
        self.RSS_CHECKINTERVAL = 20

    # chmod values are kept as octal strings and validated numerically
    if not helpers.is_number(self.CHMOD_DIR):
        logger.fdebug("CHMOD Directory value is not a valid numeric - please correct. Defaulting to 0777")
        self.CHMOD_DIR = '0777'

    if not helpers.is_number(self.CHMOD_FILE):
        logger.fdebug("CHMOD File value is not a valid numeric - please correct. Defaulting to 0660")
        self.CHMOD_FILE = '0660'

    if self.SAB_HOST.endswith('/'):
        logger.fdebug("Auto-correcting trailing slash in SABnzbd url (not required)")
        self.SAB_HOST = self.SAB_HOST[:-1]

    if self.FILE_OPTS is None:
        self.FILE_OPTS = 'move'

    if any([self.FILE_OPTS == 'hardlink', self.FILE_OPTS == 'softlink']):
        #we can't have metatagging enabled with hard/soft linking. Forcibly disable it here just in case it's set on load.
        self.ENABLE_META = False

    #comictagger - force to use included version if option is enabled.
    if self.ENABLE_META:
        mylar.CMTAGGER_PATH = mylar.PROG_DIR
        #we need to make sure the default folder setting for the comictagger settings exists so things don't error out
        mylar.CT_SETTINGSPATH = os.path.join(mylar.PROG_DIR, 'lib', 'comictaggerlib', 'ct_settings')
        if not update:
            logger.fdebug('Setting ComicTagger settings default path to : ' + mylar.CT_SETTINGSPATH)

        if not os.path.exists(mylar.CT_SETTINGSPATH):
            try:
                os.mkdir(mylar.CT_SETTINGSPATH)
            except OSError,e:
                # EEXIST is a benign race; anything else will break tagging
                if e.errno != errno.EEXIST:
                    logger.error('Unable to create setting directory for ComicTagger. This WILL cause problems when tagging.')
            else:
                logger.fdebug('Successfully created ComicTagger Settings location.')

    # mirror the selected nzb downloader into the module-level flags
    mylar.USE_SABNZBD = False
    mylar.USE_NZBGET = False
    mylar.USE_BLACKHOLE = False

    if self.NZB_DOWNLOADER == 0:
        mylar.USE_SABNZBD = True
    elif self.NZB_DOWNLOADER == 1:
        mylar.USE_NZBGET = True
    elif self.NZB_DOWNLOADER == 2:
        mylar.USE_BLACKHOLE = True
    else:
        #default to SABnzbd
        self.NZB_DOWNLOADER = 0
        mylar.USE_SABNZBD = True

    # translate numeric SAB priority into SABnzbd's named priorities
    if self.SAB_PRIORITY.isdigit():
        if self.SAB_PRIORITY == "0": self.SAB_PRIORITY = "Default"
        elif self.SAB_PRIORITY == "1": self.SAB_PRIORITY = "Low"
        elif self.SAB_PRIORITY == "2": self.SAB_PRIORITY = "Normal"
        elif self.SAB_PRIORITY == "3": self.SAB_PRIORITY = "High"
        elif self.SAB_PRIORITY == "4": self.SAB_PRIORITY = "Paused"
        else: self.SAB_PRIORITY = "Default"

    # mirror the selected torrent downloader into the module-level flags
    setattr(self, 'TORRENT_LOCAL', False)
    mylar.USE_WATCHDIR = False
    mylar.USE_UTORRENT = False
    mylar.USE_RTORRENT = False
    mylar.USE_TRANSMISSION = False
    mylar.USE_DELUGE = False
    mylar.USE_QBITTORRENT = False
    if self.TORRENT_DOWNLOADER == 0:
        mylar.USE_WATCHDIR = True
    elif self.TORRENT_DOWNLOADER == 1:
        mylar.USE_UTORRENT = True
    elif self.TORRENT_DOWNLOADER == 2:
        mylar.USE_RTORRENT = True
    elif self.TORRENT_DOWNLOADER == 3:
        mylar.USE_TRANSMISSION = True
    elif self.TORRENT_DOWNLOADER == 4:
        mylar.USE_DELUGE = True
    elif self.TORRENT_DOWNLOADER == 5:
        mylar.USE_QBITTORRENT = True
    else:
        # unknown selection: fall back to watch-dir
        self.TORRENT_DOWNLOADER = 0
        mylar.USE_WATCHDIR = True
def get_extra_newznabs(self):
    """Split the flat EXTRA_NEWZNABS csv string back into 6-field tuples.

    The ini stores each extra newznab as six consecutive comma-separated
    fields; zipping six references to one shared iterator regroups them.
    """
    fields = iter(self.EXTRA_NEWZNABS.split(', '))
    return zip(fields, fields, fields, fields, fields, fields)
def provider_sequence(self):
    """Rebuild the search-provider ordering.

    Collects every currently-enabled provider (torrent, nzb and extra
    newznab entries), reconciles the list against any previously saved
    PROVIDER_ORDER (preserving the user's relative ordering, appending
    new entries at the end and dropping disabled ones), writes the
    result to the 'Providers' ini section and stores it on
    self.PROVIDER_ORDER as an {order_seq: provider_name} dict.

    Fixes applied:
    - 'Torzanb' typo in the known-provider list meant an enabled Torznab
      provider never matched and was silently dropped from re-ordering.
    - PROVIDER_ORDER is now pre-initialized so a saved order with zero
      enabled providers no longer raises UnboundLocalError.
    """
    PR = []
    PR_NUM = 0
    if self.ENABLE_TORRENT_SEARCH:
        if self.ENABLE_32P:
            PR.append('32p')
            PR_NUM += 1
        if self.ENABLE_TPSE:
            PR.append('tpse')
            PR_NUM += 1
    if self.NZBSU:
        PR.append('nzb.su')
        PR_NUM += 1
    if self.DOGNZB:
        PR.append('dognzb')
        PR_NUM += 1
    if self.EXPERIMENTAL:
        PR.append('Experimental')
        PR_NUM += 1
    if self.ENABLE_TORZNAB:
        PR.append('Torznab')
        PR_NUM += 1

    # master list of known providers (was misspelled 'Torzanb', which made
    # Torznab unmatchable below); enabled newznab names are appended next.
    PPR = ['32p', 'tpse', 'nzb.su', 'dognzb', 'Experimental', 'Torznab']
    if self.NEWZNAB:
        for ens in self.EXTRA_NEWZNABS:
            if str(ens[5]) == '1':  # if newznabs are enabled
                # prefer the display name; fall back to the host field
                if ens[0] == "":
                    en_name = ens[1]
                else:
                    en_name = ens[0]
                PR.append(en_name)
                PPR.append(en_name)
                PR_NUM += 1

    # guard: stays empty when nothing is enabled (previously unbound)
    PROVIDER_ORDER = []
    if self.PROVIDER_ORDER is not None:
        try:
            # stored form is 'seq, name, seq, name, ...' -> pair up
            PRO_ORDER = zip(*[iter(self.PROVIDER_ORDER.split(', '))]*2)
        except:
            # already a dict - flatten back to 'seq, name' pairs first
            PO = []
            for k, v in self.PROVIDER_ORDER.iteritems():
                PO.append(k)
                PO.append(v)
            POR = ', '.join(PO)
            PRO_ORDER = zip(*[iter(POR.split(', '))]*2)

        logger.info('provider_order: %s' % self.PROVIDER_ORDER)

        #if provider order exists already, load it and then append to end any NEW entries.
        logger.fdebug('Provider sequence already pre-exists. Re-loading and adding/remove any new entries')
        TMPPR_NUM = 0
        PROV_ORDER = []
        #load original sequence
        for PRO in PRO_ORDER:
            PROV_ORDER.append({"order_seq": PRO[0],
                               "provider": str(PRO[1])})
            TMPPR_NUM += 1

        #calculate original sequence to current sequence for discrepancies
        if PR_NUM != TMPPR_NUM:
            logger.fdebug('existing Order count does not match New Order count')
            if PR_NUM > TMPPR_NUM:
                logger.fdebug('%s New entries exist, appending to end as default ordering' % (PR_NUM - TMPPR_NUM))
            else:
                logger.fdebug('%s Disabled entries exist, removing from ordering sequence' % (TMPPR_NUM - PR_NUM))
        if PR_NUM > 0:
            logger.fdebug('%s entries are enabled.' % PR_NUM)

            NEW_PROV_ORDER = []
            i = 0
            #this should loop over ALL possible entries
            while i < len(PR):
                found = False
                for d in PPR:
                    if d == PR[i]:
                        # carry over the previously saved position if one exists
                        x = [p['order_seq'] for p in PROV_ORDER if p['provider'] == PR[i]]
                        if x:
                            order_val = x[0]
                        else:
                            order_val = i
                        found = {'provider': PR[i],
                                 'order': order_val}
                        break
                    else:
                        found = False

                if found is not False:
                    new_order_seqnum = len(NEW_PROV_ORDER)
                    if new_order_seqnum <= found['order']:
                        seqnum = found['order']
                    else:
                        seqnum = new_order_seqnum
                    NEW_PROV_ORDER.append({"order_seq": len(NEW_PROV_ORDER),
                                           "provider": found['provider'],
                                           "orig_seq": int(seqnum)})
                i += 1

            #now we reorder based on priority of orig_seq, but use a new_order seq
            xa = 0
            NPROV = []
            for x in sorted(NEW_PROV_ORDER, key=itemgetter('orig_seq'), reverse=False):
                NPROV.append(str(xa))
                NPROV.append(x['provider'])
                xa += 1
            PROVIDER_ORDER = NPROV

    else:
        #priority provider sequence in order#, ProviderName
        logger.fdebug('creating provider sequence order now...')
        TMPPR_NUM = 0
        PROV_ORDER = []
        while TMPPR_NUM < PR_NUM:
            PROV_ORDER.append(str(TMPPR_NUM))
            PROV_ORDER.append(PR[TMPPR_NUM])
            TMPPR_NUM += 1
        PROVIDER_ORDER = PROV_ORDER

    # persist the flat 'seq, name, ...' form to the ini
    ll = ', '.join(PROVIDER_ORDER)
    if not config.has_section('Providers'):
        config.add_section('Providers')
    config.set('Providers', 'PROVIDER_ORDER', ll)

    # convert the flat [seq, name, seq, name, ...] list into {seq: name}
    PROVIDER_ORDER = dict(zip(*[PROVIDER_ORDER[i::2] for i in range(2)]))
    setattr(self, 'PROVIDER_ORDER', PROVIDER_ORDER)
    logger.fdebug('Provider Order is now set : %s ' % self.PROVIDER_ORDER)
def write_extra_newznabs(self):
    """Flatten the EXTRA_NEWZNABS tuples into one list of strings.

    Produces the serialized field sequence that writeconfig() joins with
    ', ' for ini storage.
    """
    return [str(field) for entry in self.EXTRA_NEWZNABS for field in entry]
20
mylar/cv.py
20
mylar/cv.py
|
@ -46,17 +46,17 @@ def pulldetails(comicid, type, issueid=None, offset=1, arclist=None, comicidlist
|
|||
#import easy to use xml parser called minidom:
|
||||
from xml.dom.minidom import parseString
|
||||
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
|
||||
comicapi = mylar.DEFAULT_CVAPI
|
||||
if mylar.CONFIG.COMICVINE_API == 'None' or mylar.CONFIG.COMICVINE_API is None:
|
||||
logger.warn('You have not specified your own ComicVine API key - it\'s a requirement. Get your own @ http://api.comicvine.com.')
|
||||
return
|
||||
else:
|
||||
comicapi = mylar.COMICVINE_API
|
||||
comicapi = mylar.CONFIG.COMICVINE_API
|
||||
|
||||
if type == 'comic':
|
||||
if not comicid.startswith('4050-'): comicid = '4050-' + comicid
|
||||
PULLURL = mylar.CVURL + 'volume/' + str(comicid) + '/?api_key=' + str(comicapi) + '&format=xml&field_list=name,count_of_issues,issues,start_year,site_detail_url,image,publisher,description,first_issue,deck,aliases'
|
||||
elif type == 'issue':
|
||||
if mylar.CV_ONLY:
|
||||
if mylar.CONFIG.CV_ONLY:
|
||||
cv_type = 'issues'
|
||||
if arclist is None:
|
||||
searchset = 'filter=volume:' + str(comicid) + '&field_list=cover_date,description,id,image,issue_number,name,date_last_updated,store_date'
|
||||
|
@ -80,17 +80,17 @@ def pulldetails(comicid, type, issueid=None, offset=1, arclist=None, comicidlist
|
|||
|
||||
#logger.info('CV.PULLURL: ' + PULLURL)
|
||||
#new CV API restriction - one api request / second.
|
||||
if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
|
||||
if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
|
||||
time.sleep(2)
|
||||
else:
|
||||
time.sleep(mylar.CVAPI_RATE)
|
||||
time.sleep(mylar.CONFIG.CVAPI_RATE)
|
||||
|
||||
#download the file:
|
||||
#set payload to None for now...
|
||||
payload = None
|
||||
|
||||
try:
|
||||
r = requests.get(PULLURL, params=payload, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
r = requests.get(PULLURL, params=payload, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
except Exception, e:
|
||||
logger.warn('Error fetching data from ComicVine: %s' % (e))
|
||||
return
|
||||
|
@ -429,7 +429,7 @@ def GetComicInfo(comicid, dom, safechk=None):
|
|||
|
||||
def GetIssuesInfo(comicid, dom, arcid=None):
|
||||
subtracks = dom.getElementsByTagName('issue')
|
||||
if not mylar.CV_ONLY:
|
||||
if not mylar.CONFIG.CV_ONLY:
|
||||
cntiss = dom.getElementsByTagName('count_of_issues')[0].firstChild.wholeText
|
||||
logger.fdebug("issues I've counted: " + str(len(subtracks)))
|
||||
logger.fdebug("issues CV says it has: " + str(int(cntiss)))
|
||||
|
@ -445,7 +445,7 @@ def GetIssuesInfo(comicid, dom, arcid=None):
|
|||
issuech = []
|
||||
firstdate = '2099-00-00'
|
||||
for subtrack in subtracks:
|
||||
if not mylar.CV_ONLY:
|
||||
if not mylar.CONFIG.CV_ONLY:
|
||||
if (dom.getElementsByTagName('name')[n].firstChild) is not None:
|
||||
issue['Issue_Name'] = dom.getElementsByTagName('name')[n].firstChild.wholeText
|
||||
else:
|
||||
|
|
|
@ -235,15 +235,15 @@ class FileChecker(object):
|
|||
reading_order = None
|
||||
|
||||
#if it's a story-arc, make sure to remove any leading reading order #'s
|
||||
if self.sarc and mylar.READ2FILENAME:
|
||||
if self.sarc and mylar.CONFIG.READ2FILENAME:
|
||||
removest = modfilename.find('-') # the - gets removed above so we test for the first blank space...
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[SARC] Checking filename for Reading Order sequence - Reading Sequence Order found #: ' + str(modfilename[:removest]))
|
||||
if modfilename[:removest].isdigit() and removest <= 3:
|
||||
reading_order = {'reading_sequence': str(modfilename[:removest]),
|
||||
'filename': filename[removest+1:]}
|
||||
modfilename = modfilename[removest+1:]
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[SARC] Removed Reading Order sequence from subname. Now set to : ' + modfilename)
|
||||
|
||||
|
||||
|
@ -801,7 +801,7 @@ class FileChecker(object):
|
|||
series_name_decoded= unicodedata.normalize('NFKD', helpers.conversion(series_name)).encode('ASCII', 'ignore')
|
||||
|
||||
#check for annual in title(s) here.
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
if 'annual' in series_name.lower():
|
||||
issue_number = 'Annual ' + str(issue_number)
|
||||
series_name = re.sub('annual', '', series_name, flags=re.I).strip()
|
||||
|
@ -855,16 +855,16 @@ class FileChecker(object):
|
|||
series_name = series_info['series_name']
|
||||
filename = series_info['comicfilename']
|
||||
#compare here - match comparison against u_watchcomic.
|
||||
logger.info('Series_Name: ' + series_name + ' --- WatchComic: ' + self.watchcomic)
|
||||
#logger.fdebug('Series_Name: ' + series_name + ' --- WatchComic: ' + self.watchcomic)
|
||||
#check for dynamic handles here.
|
||||
mod_dynamicinfo = self.dynamic_replace(series_name)
|
||||
mod_seriesname = mod_dynamicinfo['mod_seriesname']
|
||||
mod_watchcomic = mod_dynamicinfo['mod_watchcomic']
|
||||
|
||||
mod_series_decoded = self.dynamic_replace(series_info['series_name_decoded'])
|
||||
mod_seriesname_decoded = mod_dynamicinfo['mod_seriesname']
|
||||
mod_seriesname_decoded = mod_series_decoded['mod_seriesname']
|
||||
mod_watch_decoded = self.dynamic_replace(self.og_watchcomic)
|
||||
mod_watchname_decoded = mod_dynamicinfo['mod_watchcomic']
|
||||
mod_watchname_decoded = mod_watch_decoded['mod_watchcomic']
|
||||
|
||||
#remove the spaces...
|
||||
nspace_seriesname = re.sub(' ', '', mod_seriesname)
|
||||
|
@ -872,12 +872,15 @@ class FileChecker(object):
|
|||
nspace_seriesname_decoded = re.sub(' ', '', mod_seriesname_decoded)
|
||||
nspace_watchname_decoded = re.sub(' ', '', mod_watchname_decoded)
|
||||
|
||||
if '127372873872871091383' not in self.AS_Alt:
|
||||
logger.fdebug('Possible Alternate Names to match against (if necessary): ' + str(self.AS_Alt))
|
||||
try:
|
||||
if self.AS_ALT[0] != '127372873872871091383 abdkhjhskjhkjdhakajhf':
|
||||
logger.fdebug('Possible Alternate Names to match against (if necessary): ' + str(self.AS_Alt))
|
||||
except:
|
||||
pass
|
||||
|
||||
justthedigits = series_info['issue_number']
|
||||
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
if 'annual' in series_name.lower():
|
||||
justthedigits = 'Annual ' + series_info['issue_number']
|
||||
nspace_seriesname = re.sub('annual', '', nspace_seriesname.lower()).strip()
|
||||
|
@ -894,10 +897,10 @@ class FileChecker(object):
|
|||
#loop through the Alternates picking out the ones that match and then do an overall loop.
|
||||
loopchk = [x for x in self.AS_Alt if re.sub('[\|\s]','', x.lower()).strip() == re.sub('[\|\s]','', nspace_seriesname.lower()).strip()]
|
||||
if len(loopchk) > 0 and loopchk[0] != '':
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] This should be an alternate: ' + str(loopchk))
|
||||
if 'annual' in series_name.lower():
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] Annual detected - proceeding')
|
||||
enable_annual = True
|
||||
|
||||
|
@ -910,29 +913,29 @@ class FileChecker(object):
|
|||
loopchk.append(nspace_watchcomic)
|
||||
if 'annual' in nspace_seriesname.lower():
|
||||
if 'biannual' in nspace_seriesname.lower():
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] BiAnnual detected - wouldn\'t Deadpool be proud?')
|
||||
nspace_seriesname = re.sub('biannual', '', nspace_seriesname).strip()
|
||||
enable_annual = True
|
||||
else:
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] Annual detected - proceeding cautiously.')
|
||||
nspace_seriesname = re.sub('annual', '', nspace_seriesname).strip()
|
||||
enable_annual = False
|
||||
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] Complete matching list of names to this file [' + str(len(loopchk)) + '] : ' + str(loopchk))
|
||||
|
||||
for loopit in loopchk:
|
||||
#now that we have the list of all possible matches for the watchcomic + alternate search names, we go through the list until we find a match.
|
||||
modseries_name = loopit
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] AS_Tuple : ' + str(self.AS_Tuple))
|
||||
for ATS in self.AS_Tuple:
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] ' + str(ATS['AS_Alternate']) + ' comparing to ' + nspace_seriesname)
|
||||
if re.sub('\|','', ATS['AS_Alternate'].lower()).strip() == re.sub('\|','', nspace_seriesname.lower()).strip():
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] Associating ComiciD : ' + str(ATS['ComicID']))
|
||||
annual_comicid = str(ATS['ComicID'])
|
||||
modseries_name = ATS['AS_Alternate']
|
||||
|
@ -942,15 +945,15 @@ class FileChecker(object):
|
|||
|
||||
if enable_annual:
|
||||
if annual_comicid is not None:
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('enable annual is on')
|
||||
logger.fdebug('annual comicid is ' + str(annual_comicid))
|
||||
if 'biannual' in nspace_watchcomic.lower():
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('bi annual detected')
|
||||
justthedigits = 'BiAnnual ' + justthedigits
|
||||
else:
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('annual detected')
|
||||
justthedigits = 'Annual ' + justthedigits
|
||||
|
||||
|
@ -1035,7 +1038,7 @@ class FileChecker(object):
|
|||
spacer = ''
|
||||
for i in range(0, len(wdhm)):
|
||||
spacer+='|'
|
||||
mod_watchcomic = mod_watchcomic[:mf] + spacer + mod_watchcomic[mf+1:]
|
||||
mod_watchcomic = mod_watchcomic[:mf] + spacer + mod_watchcomic[mf+len(wdhm):]
|
||||
|
||||
for wdrm in watchdynamic_replacements_match:
|
||||
wdrm_find.extend([m.start() for m in re.finditer(wdrm.lower(), mod_watchcomic.lower())])
|
||||
|
@ -1051,7 +1054,8 @@ class FileChecker(object):
|
|||
#logger.fdebug('series dynamic handlers recognized : ' + str(seriesdynamic_handlers_match))
|
||||
seriesdynamic_replacements_match = [x for x in self.dynamic_replacements if x.lower() in series_name.lower()]
|
||||
#logger.fdebug('series dynamic replacements recognized : ' + str(seriesdynamic_replacements_match))
|
||||
mod_seriesname = re.sub('[\s\_\.\s+\#]', '', series_name)
|
||||
mod_seriesname = re.sub('[\s\s+\_\.]', '%$', series_name)
|
||||
mod_seriesname = re.sub('[\#]', '', mod_seriesname)
|
||||
ser_find = []
|
||||
sdrm_find = []
|
||||
if any([seriesdynamic_handlers_match, seriesdynamic_replacements_match]):
|
||||
|
@ -1078,13 +1082,15 @@ class FileChecker(object):
|
|||
mod_watchcomic = re.sub('\|+', '|', mod_watchcomic)
|
||||
if mod_watchcomic.endswith('|'):
|
||||
mod_watchcomic = mod_watchcomic[:-1]
|
||||
mod_watchcomic = re.sub('[\%\$]+', '', mod_watchcomic)
|
||||
|
||||
mod_seriesname = re.sub('\|+', '|', mod_seriesname)
|
||||
if mod_seriesname.endswith('|'):
|
||||
mod_seriesname = mod_seriesname[:-1]
|
||||
mod_seriesname = re.sub('[\%\$]+', '', mod_seriesname)
|
||||
|
||||
return {'mod_watchcomic': mod_watchcomic,
|
||||
'mod_seriesname': mod_seriesname}
|
||||
return {'mod_watchcomic': mod_watchcomic,
|
||||
'mod_seriesname': mod_seriesname}
|
||||
|
||||
def altcheck(self):
|
||||
#iniitate the alternate list here so we can add in the different alternate search names (if present)
|
||||
|
@ -1106,15 +1112,15 @@ class FileChecker(object):
|
|||
# if it's !! present, it's the comicid associated with the series as an added annual.
|
||||
# extract the !!, store it and then remove it so things will continue.
|
||||
as_start = AS_Alternate.find('!!')
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('as_start: ' + str(as_start) + ' --- ' + str(AS_Alternate[as_start:]))
|
||||
as_end = AS_Alternate.find('##', as_start)
|
||||
if as_end == -1:
|
||||
as_end = len(AS_Alternate)
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('as_start: ' + str(as_end) + ' --- ' + str(AS_Alternate[as_start:as_end]))
|
||||
AS_ComicID = AS_Alternate[as_start +2:as_end]
|
||||
if mylar.FOLDER_SCAN_LOG_VERBOSE:
|
||||
if mylar.CONFIG.FOLDER_SCAN_LOG_VERBOSE:
|
||||
logger.fdebug('[FILECHECKER] Extracted comicid for given annual : ' + str(AS_ComicID))
|
||||
AS_Alternate = re.sub('!!' + str(AS_ComicID), '', AS_Alternate)
|
||||
AS_tupled = True
|
||||
|
@ -1184,11 +1190,11 @@ def validateAndCreateDirectory(dir, create=False, module=None):
|
|||
logger.warn(module + ' Could not find comic directory: ' + dir)
|
||||
if create:
|
||||
if dir.strip():
|
||||
logger.info(module + ' Creating comic directory (' + str(mylar.CHMOD_DIR) + ') : ' + dir)
|
||||
logger.info(module + ' Creating comic directory (' + str(mylar.CONFIG.CHMOD_DIR) + ') : ' + dir)
|
||||
try:
|
||||
os.umask(0) # this is probably redudant, but it doesn't hurt to clear the umask here.
|
||||
if mylar.ENFORCE_PERMS:
|
||||
permission = int(mylar.CHMOD_DIR, 8)
|
||||
if mylar.CONFIG.ENFORCE_PERMS:
|
||||
permission = int(mylar.CONFIG.CHMOD_DIR, 8)
|
||||
os.makedirs(dir.rstrip(), permission)
|
||||
setperms(dir.rstrip(), True)
|
||||
else:
|
||||
|
@ -1212,50 +1218,50 @@ def setperms(path, dir=False):
|
|||
|
||||
try:
|
||||
os.umask(0) # this is probably redudant, but it doesn't hurt to clear the umask here.
|
||||
if mylar.CHGROUP:
|
||||
if mylar.CHOWNER is None or mylar.CHOWNER == 'None' or mylar.CHOWNER == '':
|
||||
if mylar.CONFIG.CHGROUP:
|
||||
if mylar.CONFIG.CHOWNER is None or mylar.CONFIG.CHOWNER == 'None' or mylar.CONFIG.CHOWNER == '':
|
||||
import getpass
|
||||
mylar.CHOWNER = getpass.getuser()
|
||||
mylar.CONFIG.CHOWNER = getpass.getuser()
|
||||
|
||||
if not mylar.CHOWNER.isdigit():
|
||||
if not mylar.CONFIG.CHOWNER.isdigit():
|
||||
from pwd import getpwnam
|
||||
chowner = getpwnam(mylar.CHOWNER)[2]
|
||||
chowner = getpwnam(mylar.CONFIG.CHOWNER)[2]
|
||||
else:
|
||||
chowner = int(mylar.CHOWNER)
|
||||
chowner = int(mylar.CONFIG.CHOWNER)
|
||||
|
||||
if not mylar.CHGROUP.isdigit():
|
||||
if not mylar.CONFIG.CHGROUP.isdigit():
|
||||
from grp import getgrnam
|
||||
chgroup = getgrnam(mylar.CHGROUP)[2]
|
||||
chgroup = getgrnam(mylar.CONFIG.CHGROUP)[2]
|
||||
else:
|
||||
chgroup = int(mylar.CHGROUP)
|
||||
chgroup = int(mylar.CONFIG.CHGROUP)
|
||||
|
||||
if dir:
|
||||
permission = int(mylar.CHMOD_DIR, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_DIR, 8)
|
||||
os.chmod(path, permission)
|
||||
os.chown(path, chowner, chgroup)
|
||||
else:
|
||||
for root, dirs, files in os.walk(path):
|
||||
for momo in dirs:
|
||||
permission = int(mylar.CHMOD_DIR, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_DIR, 8)
|
||||
os.chown(os.path.join(root, momo), chowner, chgroup)
|
||||
os.chmod(os.path.join(root, momo), permission)
|
||||
for momo in files:
|
||||
permission = int(mylar.CHMOD_FILE, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_FILE, 8)
|
||||
os.chown(os.path.join(root, momo), chowner, chgroup)
|
||||
os.chmod(os.path.join(root, momo), permission)
|
||||
|
||||
logger.fdebug('Successfully changed ownership and permissions [' + str(mylar.CHOWNER) + ':' + str(mylar.CHGROUP) + '] / [' + str(mylar.CHMOD_DIR) + ' / ' + str(mylar.CHMOD_FILE) + ']')
|
||||
logger.fdebug('Successfully changed ownership and permissions [' + str(mylar.CONFIG.CHOWNER) + ':' + str(mylar.CONFIG.CHGROUP) + '] / [' + str(mylar.CONFIG.CHMOD_DIR) + ' / ' + str(mylar.CONFIG.CHMOD_FILE) + ']')
|
||||
|
||||
else:
|
||||
for root, dirs, files in os.walk(path):
|
||||
for momo in dirs:
|
||||
permission = int(mylar.CHMOD_DIR, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_DIR, 8)
|
||||
os.chmod(os.path.join(root, momo), permission)
|
||||
for momo in files:
|
||||
permission = int(mylar.CHMOD_FILE, 8)
|
||||
permission = int(mylar.CONFIG.CHMOD_FILE, 8)
|
||||
os.chmod(os.path.join(root, momo), permission)
|
||||
|
||||
logger.fdebug('Successfully changed permissions [' + str(mylar.CHMOD_DIR) + ' / ' + str(mylar.CHMOD_FILE) + ']')
|
||||
logger.fdebug('Successfully changed permissions [' + str(mylar.CONFIG.CHMOD_DIR) + ' / ' + str(mylar.CONFIG.CHMOD_FILE) + ']')
|
||||
|
||||
except OSError:
|
||||
logger.error('Could not change permissions : ' + path + '. Exiting...')
|
||||
|
|
|
@ -31,8 +31,8 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix):
|
|||
searchIsOne = "0" +searchIssue
|
||||
searchIsTwo = "00" +searchIssue
|
||||
|
||||
if mylar.PREFERRED_QUALITY == 1: joinSearch = joinSearch + " .cbr"
|
||||
elif mylar.PREFERRED_QUALITY == 2: joinSearch = joinSearch + " .cbz"
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 1: joinSearch = joinSearch + " .cbr"
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2: joinSearch = joinSearch + " .cbz"
|
||||
|
||||
if "-" in searchName:
|
||||
searchName = searchName.replace("-", '((\\s)?[-:])?(\\s)?')
|
||||
|
@ -43,22 +43,22 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix):
|
|||
#logger.fdebug('searchName:' + searchName)
|
||||
#logger.fdebug('regexName:' + regexName)
|
||||
|
||||
if mylar.USE_MINSIZE:
|
||||
size_constraints = "minsize=" + str(mylar.MINSIZE)
|
||||
if mylar.CONFIG.USE_MINSIZE:
|
||||
size_constraints = "minsize=" + str(mylar.CONFIG.MINSIZE)
|
||||
else:
|
||||
size_constraints = "minsize=10"
|
||||
|
||||
if mylar.USE_MAXSIZE:
|
||||
size_constraints = size_constraints + "&maxsize=" + str(mylar.MAXSIZE)
|
||||
if mylar.CONFIG.USE_MAXSIZE:
|
||||
size_constraints = size_constraints + "&maxsize=" + str(mylar.CONFIG.MAXSIZE)
|
||||
|
||||
if mylar.USENET_RETENTION != None:
|
||||
max_age = "&age=" + str(mylar.USENET_RETENTION)
|
||||
if mylar.CONFIG.USENET_RETENTION != None:
|
||||
max_age = "&age=" + str(mylar.CONFIG.USENET_RETENTION)
|
||||
|
||||
feeds = []
|
||||
feed1 = "http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch
|
||||
feeds.append(feedparser.parse("http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch))
|
||||
time.sleep(3)
|
||||
if mylar.ALTEXPERIMENTAL:
|
||||
if mylar.CONFIG.ALTEXPERIMENTAL:
|
||||
feed2 = "http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch
|
||||
feeds.append(feedparser.parse("http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch))
|
||||
time.sleep(3)
|
||||
|
|
|
@ -17,15 +17,15 @@ def putfile(localpath, file): #localpath=full path to .torrent (including fil
|
|||
logger.fdebug('aborting send.')
|
||||
return "fail"
|
||||
|
||||
host = mylar.SEEDBOX_HOST
|
||||
port = int(mylar.SEEDBOX_PORT) #this is usually 22
|
||||
host = mylar.CONFIG.SEEDBOX_HOST
|
||||
port = int(mylar.CONFIG.SEEDBOX_PORT) #this is usually 22
|
||||
transport = paramiko.Transport((host, port))
|
||||
|
||||
logger.fdebug('Sending file: ' + str(file))
|
||||
logger.fdebug('destination: ' + str(host))
|
||||
logger.fdebug('Using SSH port : ' + str(port))
|
||||
password = mylar.SEEDBOX_PASS
|
||||
username = mylar.SEEDBOX_USER
|
||||
password = mylar.CONFIG.SEEDBOX_PASS
|
||||
username = mylar.CONFIG.SEEDBOX_USER
|
||||
transport.connect(username = username, password = password)
|
||||
|
||||
sftp = paramiko.SFTPClient.from_transport(transport)
|
||||
|
@ -33,7 +33,7 @@ def putfile(localpath, file): #localpath=full path to .torrent (including fil
|
|||
import sys
|
||||
if file[-7:] != "torrent":
|
||||
file += ".torrent"
|
||||
rempath = os.path.join(mylar.SEEDBOX_WATCHDIR, file) #this will default to the OS running mylar for slashes.
|
||||
rempath = os.path.join(mylar.CONFIG.SEEDBOX_WATCHDIR, file) #this will default to the OS running mylar for slashes.
|
||||
logger.fdebug('remote path set to ' + str(rempath))
|
||||
logger.fdebug('local path set to ' + str(localpath))
|
||||
|
||||
|
@ -78,22 +78,22 @@ def sendfiles(filelist):
|
|||
logger.fdebug('aborting send.')
|
||||
return
|
||||
|
||||
fhost = mylar.TAB_HOST.find(':')
|
||||
host = mylar.TAB_HOST[:fhost]
|
||||
port = int(mylar.TAB_HOST[fhost +1:])
|
||||
fhost = mylar.CONFIG.TAB_HOST.find(':')
|
||||
host = mylar.CONFIG.TAB_HOST[:fhost]
|
||||
port = int(mylar.CONFIG.TAB_HOST[fhost +1:])
|
||||
|
||||
logger.fdebug('Destination: ' + host)
|
||||
logger.fdebug('Using SSH port : ' + str(port))
|
||||
|
||||
transport = paramiko.Transport((host, port))
|
||||
|
||||
password = mylar.TAB_PASS
|
||||
username = mylar.TAB_USER
|
||||
password = mylar.CONFIG.TAB_PASS
|
||||
username = mylar.CONFIG.TAB_USER
|
||||
transport.connect(username = username, password = password)
|
||||
|
||||
sftp = paramiko.SFTPClient.from_transport(transport)
|
||||
|
||||
remotepath = mylar.TAB_DIRECTORY
|
||||
remotepath = mylar.CONFIG.TAB_DIRECTORY
|
||||
logger.fdebug('remote path set to ' + remotepath)
|
||||
|
||||
if len(filelist) > 0:
|
||||
|
@ -102,9 +102,9 @@ def sendfiles(filelist):
|
|||
|
||||
|
||||
def sendtohome(sftp, remotepath, filelist, transport):
|
||||
fhost = mylar.TAB_HOST.find(':')
|
||||
host = mylar.TAB_HOST[:fhost]
|
||||
port = int(mylar.TAB_HOST[fhost +1:])
|
||||
fhost = mylar.CONFIG.TAB_HOST.find(':')
|
||||
host = mylar.CONFIG.TAB_HOST[:fhost]
|
||||
port = int(mylar.CONFIG.TAB_HOST[fhost +1:])
|
||||
|
||||
successlist = []
|
||||
filestotal = len(filelist)
|
||||
|
@ -130,7 +130,7 @@ def sendtohome(sftp, remotepath, filelist, transport):
|
|||
|
||||
remdir = remotepath
|
||||
|
||||
if mylar.MAINTAINSERIESFOLDER == 1:
|
||||
if mylar.CONFIG.MAINTAINSERIESFOLDER == 1:
|
||||
# Get folder path of issue
|
||||
comicdir = os.path.split(files['filepath'])[0]
|
||||
# Isolate comic folder name
|
||||
|
@ -179,7 +179,7 @@ def sendtohome(sftp, remotepath, filelist, transport):
|
|||
transport.close()
|
||||
#reload the transport here cause it locked up previously.
|
||||
transport = paramiko.Transport((host, port))
|
||||
transport.connect(username=mylar.TAB_USER, password=mylar.TAB_PASS)
|
||||
transport.connect(username=mylar.CONFIG.TAB_USER, password=mylar.CONFIG.TAB_PASS)
|
||||
sftp = paramiko.SFTPClient.from_transport(transport)
|
||||
count+=1
|
||||
if count > 5:
|
||||
|
@ -208,7 +208,7 @@ def sendtohome(sftp, remotepath, filelist, transport):
|
|||
transport.close()
|
||||
#reload the transport here cause it locked up previously.
|
||||
transport = paramiko.Transport((host, port))
|
||||
transport.connect(username=mylar.TAB_USER, password=mylar.TAB_PASS)
|
||||
transport.connect(username=mylar.CONFIG.TAB_USER, password=mylar.CONFIG.TAB_PASS)
|
||||
sftp = paramiko.SFTPClient.from_transport(transport)
|
||||
count+=1
|
||||
if count > 5:
|
||||
|
|
243
mylar/helpers.py
243
mylar/helpers.py
|
@ -289,7 +289,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
#this has to be adjusted to be able to include story arc issues that span multiple arcs
|
||||
chkissue = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
|
||||
else:
|
||||
if all([annualize is None, not mylar.ANNUALS_ON]):
|
||||
if all([annualize is None, not mylar.CONFIG.ANNUALS_ON]):
|
||||
chkissue = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
|
||||
else:
|
||||
chkissue = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
|
||||
|
@ -300,7 +300,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
chkissue = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
|
||||
else:
|
||||
chkissue = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
|
||||
if all([annualize == 'yes', mylar.ANNUALS_ON]):
|
||||
if all([annualize == 'yes', mylar.CONFIG.ANNUALS_ON]):
|
||||
chkissue = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
|
||||
|
||||
if chkissue is None:
|
||||
|
@ -340,14 +340,14 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
seriesfilename = series #Alternate FileNaming is not available with story arcs.
|
||||
seriesyear = issuenzb['SeriesYear']
|
||||
arcdir = filesafe(issuenzb['StoryArc'])
|
||||
if mylar.REPLACE_SPACES:
|
||||
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.STORYARCDIR:
|
||||
storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", arcdir)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
arcdir = arcdir.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
if mylar.CONFIG.STORYARCDIR:
|
||||
storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, "StoryArcs", arcdir)
|
||||
logger.fdebug('Story Arc Directory set to : ' + storyarcd)
|
||||
else:
|
||||
logger.fdebug('Story Arc Directory set to : ' + mylar.GRABBAG_DIR)
|
||||
storyarcd = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
|
||||
logger.fdebug('Story Arc Directory set to : ' + mylar.CONFIG.GRABBAG_DIR)
|
||||
storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, mylar.CONFIG.GRABBAG_DIR)
|
||||
|
||||
comlocation = storyarcd
|
||||
comversion = None #need to populate this.
|
||||
|
@ -449,14 +449,14 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
iss = issuenum
|
||||
issueno = iss
|
||||
# issue zero-suppression here
|
||||
if mylar.ZERO_LEVEL == "0":
|
||||
if mylar.CONFIG.ZERO_LEVEL == "0":
|
||||
zeroadd = ""
|
||||
else:
|
||||
if mylar.ZERO_LEVEL_N == "none": zeroadd = ""
|
||||
elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
|
||||
elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"
|
||||
if mylar.CONFIG.ZERO_LEVEL_N == "none": zeroadd = ""
|
||||
elif mylar.CONFIG.ZERO_LEVEL_N == "0x": zeroadd = "0"
|
||||
elif mylar.CONFIG.ZERO_LEVEL_N == "00x": zeroadd = "00"
|
||||
|
||||
logger.fdebug('Zero Suppression set to : ' + str(mylar.ZERO_LEVEL_N))
|
||||
logger.fdebug('Zero Suppression set to : ' + str(mylar.CONFIG.ZERO_LEVEL_N))
|
||||
prettycomiss = None
|
||||
|
||||
if issueno.isalpha():
|
||||
|
@ -496,10 +496,10 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
prettycomiss = str(zeroadd) + str(iss)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
elif int(issueno) >= 10 and int(issueno) < 100:
|
||||
logger.fdebug('issue detected greater than 10, but less than 100')
|
||||
if mylar.ZERO_LEVEL_N == "none":
|
||||
if mylar.CONFIG.ZERO_LEVEL_N == "none":
|
||||
zeroadd = ""
|
||||
else:
|
||||
zeroadd = "0"
|
||||
|
@ -513,7 +513,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
prettycomiss = str(zeroadd) + str(iss)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '.Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '.Issue will be set as : ' + str(prettycomiss))
|
||||
else:
|
||||
logger.fdebug('issue detected greater than 100')
|
||||
if issuenum == 'infinity':
|
||||
|
@ -525,13 +525,13 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
prettycomiss = str(issueno)
|
||||
if issue_except != 'None':
|
||||
prettycomiss = str(prettycomiss) + issue_except
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
|
||||
elif len(str(issueno)) == 0:
|
||||
prettycomiss = str(issueno)
|
||||
logger.fdebug('issue length error - cannot determine length. Defaulting to None: ' + str(prettycomiss))
|
||||
|
||||
logger.fdebug('Pretty Comic Issue is : ' + str(prettycomiss))
|
||||
if mylar.UNICODE_ISSUENUMBER:
|
||||
if mylar.CONFIG.UNICODE_ISSUENUMBER:
|
||||
logger.fdebug('Setting this to Unicode format as requested: %s' % prettycomiss)
|
||||
prettycomiss = unicodeissue
|
||||
|
||||
|
@ -549,13 +549,13 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
comversion = 'None'
|
||||
#if comversion is None, remove it so it doesn't populate with 'None'
|
||||
if comversion == 'None':
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.CONFIG.FILE_FORMAT)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
logger.fdebug('No version # found for series, removing from filename')
|
||||
logger.fdebug("new format: " + str(chunk_file_format))
|
||||
else:
|
||||
chunk_file_format = mylar.FILE_FORMAT
|
||||
chunk_file_format = mylar.CONFIG.FILE_FORMAT
|
||||
|
||||
if annualize is None:
|
||||
chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
|
||||
|
@ -566,7 +566,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
|
||||
else:
|
||||
logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
if 'annual' in series.lower():
|
||||
if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
|
||||
#if it's an annual, but $annual isn't specified in file_format, we need to
|
||||
|
@ -646,7 +646,7 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
if ofilename.lower().endswith(extensions):
|
||||
path, ext = os.path.splitext(ofilename)
|
||||
|
||||
if mylar.FILE_FORMAT == '':
|
||||
if mylar.CONFIG.FILE_FORMAT == '':
|
||||
logger.fdebug('Rename Files is not enabled - keeping original filename.')
|
||||
#check if extension is in nzb_name - will screw up otherwise
|
||||
if ofilename.lower().endswith(extensions):
|
||||
|
@ -655,14 +655,14 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
|
|||
nfilename = ofilename
|
||||
else:
|
||||
nfilename = replace_all(chunk_file_format, file_values)
|
||||
if mylar.REPLACE_SPACES:
|
||||
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
nfilename = nfilename.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
|
||||
nfilename = re.sub('[\,\:]', '', nfilename) + ext.lower()
|
||||
logger.fdebug('New Filename: ' + nfilename)
|
||||
|
||||
if mylar.LOWERCASE_FILENAMES:
|
||||
if mylar.CONFIG.LOWERCASE_FILENAMES:
|
||||
dst = os.path.join(comlocation, nfilename.lower())
|
||||
else:
|
||||
dst = os.path.join(comlocation, nfilename)
|
||||
|
@ -792,10 +792,10 @@ def updateComicLocation():
|
|||
|
||||
import db, logger
|
||||
myDB = db.DBConnection()
|
||||
if mylar.NEWCOM_DIR is not None:
|
||||
if mylar.CONFIG.NEWCOM_DIR is not None:
|
||||
logger.info('Performing a one-time mass update to Comic Location')
|
||||
#create the root dir if it doesn't exist
|
||||
checkdirectory = mylar.filechecker.validateAndCreateDirectory(mylar.NEWCOM_DIR, create=True)
|
||||
checkdirectory = mylar.filechecker.validateAndCreateDirectory(mylar.CONFIG.NEWCOM_DIR, create=True)
|
||||
if not checkdirectory:
|
||||
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
|
||||
return
|
||||
|
@ -816,11 +816,11 @@ def updateComicLocation():
|
|||
comversion = 'None'
|
||||
#if comversion is None, remove it so it doesn't populate with 'None'
|
||||
if comversion == 'None':
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.FOLDER_FORMAT)
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.CONFIG.FOLDER_FORMAT)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
folderformat = chunk_f.sub(' ', chunk_f_f)
|
||||
else:
|
||||
folderformat = mylar.FOLDER_FORMAT
|
||||
folderformat = mylar.CONFIG.FOLDER_FORMAT
|
||||
|
||||
#do work to generate folder path
|
||||
|
||||
|
@ -835,20 +835,20 @@ def updateComicLocation():
|
|||
}
|
||||
|
||||
#set the paths here with the seperator removed allowing for cross-platform altering.
|
||||
ccdir = re.sub(r'[\\|/]', '%&', mylar.NEWCOM_DIR)
|
||||
ddir = re.sub(r'[\\|/]', '%&', mylar.DESTINATION_DIR)
|
||||
ccdir = re.sub(r'[\\|/]', '%&', mylar.CONFIG.NEWCOM_DIR)
|
||||
ddir = re.sub(r'[\\|/]', '%&', mylar.CONFIG.DESTINATION_DIR)
|
||||
dlc = re.sub(r'[\\|/]', '%&', dl['ComicLocation'])
|
||||
|
||||
if mylar.FFTONEWCOM_DIR:
|
||||
if mylar.CONFIG.FFTONEWCOM_DIR:
|
||||
#if this is enabled (1) it will apply the Folder_Format to all the new dirs
|
||||
if mylar.FOLDER_FORMAT == '':
|
||||
if mylar.CONFIG.FOLDER_FORMAT == '':
|
||||
comlocation = re.sub(ddir, ccdir, dlc).strip()
|
||||
else:
|
||||
first = replace_all(folderformat, values)
|
||||
if mylar.REPLACE_SPACES:
|
||||
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
first = first.replace(' ', mylar.REPLACE_CHAR)
|
||||
comlocation = os.path.join(mylar.NEWCOM_DIR, first).strip()
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
first = first.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
comlocation = os.path.join(mylar.CONFIG.NEWCOM_DIR, first).strip()
|
||||
|
||||
else:
|
||||
#DESTINATION_DIR = /mnt/mediavg/Comics
|
||||
|
@ -865,7 +865,7 @@ def updateComicLocation():
|
|||
|
||||
if len(comloc) > 0:
|
||||
#give the information about what we're doing.
|
||||
if mylar.FFTONEWCOM_DIR:
|
||||
if mylar.CONFIG.FFTONEWCOM_DIR:
|
||||
logger.info('FFTONEWCOM_DIR is enabled. Applying the existing folder format to ALL directories regardless of existing location paths')
|
||||
else:
|
||||
logger.info('FFTONEWCOM_DIR is not enabled. I will keep existing subdirectory paths, and will only change the actual Comic Location in the path.')
|
||||
|
@ -883,8 +883,8 @@ def updateComicLocation():
|
|||
else:
|
||||
logger.info('There are no series in your watchlist to Update the locations. Not updating anything at this time.')
|
||||
#set the value to 0 here so we don't keep on doing this...
|
||||
mylar.LOCMOVE = 0
|
||||
mylar.config_write()
|
||||
mylar.CONFIG.LOCMOVE = 0
|
||||
#mylar.config_write()
|
||||
else:
|
||||
logger.info('No new ComicLocation path specified - not updating. Set NEWCOMD_DIR in config.ini')
|
||||
#raise cherrypy.HTTPRedirect("config")
|
||||
|
@ -1091,15 +1091,15 @@ def checkthepub(ComicID):
|
|||
pubchk = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [ComicID]).fetchone()
|
||||
if pubchk is None:
|
||||
logger.fdebug('No publisher information found to aid in determining series..defaulting to base check of 55 days.')
|
||||
return mylar.BIGGIE_PUB
|
||||
return mylar.CONFIG.BIGGIE_PUB
|
||||
else:
|
||||
for publish in publishers:
|
||||
if publish in pubchk['ComicPublisher'].lower():
|
||||
logger.fdebug('Biggie publisher detected - ' + pubchk['ComicPublisher'])
|
||||
return mylar.BIGGIE_PUB
|
||||
return mylar.CONFIG.BIGGIE_PUB
|
||||
|
||||
logger.fdebug('Indie publisher detected - ' + pubchk['ComicPublisher'])
|
||||
return mylar.INDIE_PUB
|
||||
return mylar.CONFIG.INDIE_PUB
|
||||
|
||||
def annual_update():
|
||||
import db, logger
|
||||
|
@ -1239,9 +1239,9 @@ def upgrade_dynamic():
|
|||
newVal = {"DynamicComicName": ds['DynamicComicName']}
|
||||
myDB.upsert("readinglist", newVal, CtrlVal)
|
||||
|
||||
logger.info('Finshed updating ' + str(len(dynamic_comiclist)) + ' / ' + str(len(dynamic_storylist)) + ' entries within the db.')
|
||||
mylar.DYNAMIC_UPDATE = 4
|
||||
mylar.config_write()
|
||||
logger.info('Finished updating ' + str(len(dynamic_comiclist)) + ' / ' + str(len(dynamic_storylist)) + ' entries within the db.')
|
||||
mylar.CONFIG.DYNAMIC_UPDATE = 4
|
||||
mylar.CONFIG.writeconfig()
|
||||
return
|
||||
|
||||
def checkFolder(folderpath=None):
|
||||
|
@ -1251,8 +1251,8 @@ def checkFolder(folderpath=None):
|
|||
queue = Queue.Queue()
|
||||
#monitor a selected folder for 'snatched' files that haven't been processed
|
||||
if folderpath is None:
|
||||
logger.info('Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
path = mylar.CHECK_FOLDER
|
||||
logger.info('Checking folder ' + mylar.CONFIG.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
path = mylar.CONFIG.CHECK_FOLDER
|
||||
else:
|
||||
logger.info('Submitted folder ' + folderpath + ' for direct folder post-processing')
|
||||
path = folderpath
|
||||
|
@ -1300,7 +1300,7 @@ def havetotals(refreshit=None):
|
|||
myDB = db.DBConnection()
|
||||
|
||||
if refreshit is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
comiclist = myDB.select('SELECT comics.*, COUNT(totalAnnuals.IssueID) AS TotalAnnuals FROM comics LEFT JOIN annuals as totalAnnuals on totalAnnuals.ComicID = comics.ComicID GROUP BY comics.ComicID order by comics.ComicSortName COLLATE NOCASE')
|
||||
else:
|
||||
comiclist = myDB.select('SELECT * FROM comics GROUP BY ComicID order by ComicSortName COLLATE NOCASE')
|
||||
|
@ -1323,7 +1323,7 @@ def havetotals(refreshit=None):
|
|||
# continue
|
||||
try:
|
||||
totalissues = comic['Total']
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
totalissues += comic['TotalAnnuals']
|
||||
haveissues = comic['Have']
|
||||
except TypeError:
|
||||
|
@ -1425,7 +1425,7 @@ def IssueDetails(filelocation, IssueID=None, justinfo=False):
|
|||
issuetag = None
|
||||
|
||||
if justinfo is False:
|
||||
dstlocation = os.path.join(mylar.CACHE_DIR, 'temp.zip')
|
||||
dstlocation = os.path.join(mylar.CONFIG.CACHE_DIR, 'temp.zip')
|
||||
|
||||
|
||||
if filelocation.endswith('.cbz'):
|
||||
|
@ -1451,14 +1451,14 @@ def IssueDetails(filelocation, IssueID=None, justinfo=False):
|
|||
low_infile_name = infile
|
||||
if infile == 'ComicInfo.xml':
|
||||
logger.fdebug('Extracting ComicInfo.xml to display.')
|
||||
dst = os.path.join(mylar.CACHE_DIR, 'ComicInfo.xml')
|
||||
dst = os.path.join(mylar.CONFIG.CACHE_DIR, 'ComicInfo.xml')
|
||||
data = inzipfile.read(infile)
|
||||
#print str(data)
|
||||
issuetag = 'xml'
|
||||
#looks for the first page and assumes it's the cover. (Alternate covers handled later on)
|
||||
elif any(['000.' in infile, '00.' in infile]) and infile.endswith(pic_extensions) and cover == "notfound":
|
||||
logger.fdebug('Extracting primary image ' + infile + ' as coverfile for display.')
|
||||
local_file = open(os.path.join(mylar.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file = open(os.path.join(mylar.CONFIG.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file.write(inzipfile.read(infile))
|
||||
local_file.close
|
||||
cover = "found"
|
||||
|
@ -1467,7 +1467,7 @@ def IssueDetails(filelocation, IssueID=None, justinfo=False):
|
|||
altlist = ('00a', '00b', '00c', '00d', '00e')
|
||||
for alt in altlist:
|
||||
if alt in infile:
|
||||
local_file = open(os.path.join(mylar.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file = open(os.path.join(mylar.CONFIG.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file.write(inzipfile.read(infile))
|
||||
local_file.close
|
||||
cover = "found"
|
||||
|
@ -1475,14 +1475,14 @@ def IssueDetails(filelocation, IssueID=None, justinfo=False):
|
|||
|
||||
elif any(['001.jpg' in infile, '001.png' in infile, '001.webp' in infile, '01.jpg' in infile, '01.png' in infile, '01.webp' in infile]) and cover == "notfound":
|
||||
logger.fdebug('Extracting primary image ' + infile + ' as coverfile for display.')
|
||||
local_file = open(os.path.join(mylar.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file = open(os.path.join(mylar.CONFIG.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file.write(inzipfile.read(infile))
|
||||
local_file.close
|
||||
cover = "found"
|
||||
|
||||
if cover != "found":
|
||||
logger.fdebug('Invalid naming sequence for jpgs discovered. Attempting to find the lowest sequence and will use as cover (it might not work). Currently : ' + str(low_infile))
|
||||
local_file = open(os.path.join(mylar.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file = open(os.path.join(mylar.CONFIG.CACHE_DIR, 'temp.jpg'), "wb")
|
||||
local_file.write(inzipfile.read(low_infile_name))
|
||||
local_file.close
|
||||
cover = "found"
|
||||
|
@ -1816,7 +1816,7 @@ def listLibrary():
|
|||
for row in list:
|
||||
library[row['ComicID']] = row['ComicID']
|
||||
# Add the annuals
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
list = myDB.select("SELECT ReleaseComicId,ComicID FROM Annuals")
|
||||
for row in list:
|
||||
library[row['ReleaseComicId']] = row['ComicID']
|
||||
|
@ -2008,7 +2008,7 @@ def listIssues(weeknumber, year):
|
|||
'IssueYear': tmpdate})
|
||||
|
||||
# Add the annuals
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
list = myDB.select("SELECT annuals.Status, annuals.ComicID, annuals.ReleaseComicID, annuals.IssueID, annuals.ComicName, annuals.ReleaseDate, annuals.IssueDate, weekly.publisher, annuals.Issue_Number from weekly, annuals where weekly.IssueID = annuals.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year])
|
||||
for row in list:
|
||||
if row['ReleaseDate'] is None:
|
||||
|
@ -2115,12 +2115,12 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
|
|||
else:
|
||||
logger.info('[DUPECHECK] Assuming 0-byte file - this one is gonna get hammered.')
|
||||
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.DUPECONSTRAINT)
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.CONFIG.DUPECONSTRAINT)
|
||||
|
||||
tmp_dupeconstraint = mylar.DUPECONSTRAINT
|
||||
tmp_dupeconstraint = mylar.CONFIG.DUPECONSTRAINT
|
||||
|
||||
if any(['cbr' in mylar.DUPECONSTRAINT, 'cbz' in mylar.DUPECONSTRAINT]):
|
||||
if 'cbr' in mylar.DUPECONSTRAINT:
|
||||
if any(['cbr' in mylar.CONFIG.DUPECONSTRAINT, 'cbz' in mylar.CONFIG.DUPECONSTRAINT]):
|
||||
if 'cbr' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if filename.endswith('.cbr'):
|
||||
#this has to be configured in config - either retain cbr or cbz.
|
||||
if dupchk['Location'].endswith('.cbr'):
|
||||
|
@ -2141,7 +2141,7 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
|
|||
rtnval = {'action': "dupe_file",
|
||||
'to_dupe': filename}
|
||||
|
||||
elif 'cbz' in mylar.DUPECONSTRAINT:
|
||||
elif 'cbz' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if filename.endswith('.cbr'):
|
||||
if dupchk['Location'].endswith('.cbr'):
|
||||
logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] BOTH files are in cbr format. Retaining the larger filesize of the two.')
|
||||
|
@ -2161,7 +2161,7 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
|
|||
rtnval = {'action': "dupe_src",
|
||||
'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}
|
||||
|
||||
if mylar.DUPECONSTRAINT == 'filesize' or tmp_dupeconstraint == 'filesize':
|
||||
if mylar.CONFIG.DUPECONSTRAINT == 'filesize' or tmp_dupeconstraint == 'filesize':
|
||||
if filesz <= int(dupsize) and int(dupsize) != 0:
|
||||
logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location'])
|
||||
rtnval = {'action': "dupe_file",
|
||||
|
@ -2227,7 +2227,7 @@ def torrent_create(site, linkid, alt=None):
|
|||
|
||||
def parse_32pfeed(rssfeedline):
|
||||
KEYS_32P = {}
|
||||
if mylar.ENABLE_32P and len(rssfeedline) > 1:
|
||||
if mylar.CONFIG.ENABLE_32P and len(rssfeedline) > 1:
|
||||
userid_st = rssfeedline.find('&user')
|
||||
userid_en = rssfeedline.find('&', userid_st +1)
|
||||
if userid_en == -1:
|
||||
|
@ -2252,7 +2252,7 @@ def parse_32pfeed(rssfeedline):
|
|||
KEYS_32P = {"user": USERID_32P,
|
||||
"auth": AUTH_32P,
|
||||
"authkey": AUTHKEY_32P,
|
||||
"passkey": mylar.PASSKEY_32P}
|
||||
"passkey": mylar.CONFIG.PASSKEY_32P}
|
||||
|
||||
return KEYS_32P
|
||||
|
||||
|
@ -2504,8 +2504,8 @@ def updatearc_locs(storyarcid, issues):
|
|||
if chk['Status'] == 'Downloaded':
|
||||
pathsrc = os.path.join(chk['ComicLocation'], chk['Location'])
|
||||
if not os.path.exists(pathsrc):
|
||||
if all([mylar.MULTIPLE_DEST_DIRS is not None, mylar.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])) != chk['ComicLocation'], os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])))]):
|
||||
pathsrc = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation']), chk['Location'])
|
||||
if all([mylar.CONFIG.MULTIPLE_DEST_DIRS is not None, mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])) != chk['ComicLocation'], os.path.exists(os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation'])))]):
|
||||
pathsrc = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(chk['ComicLocation']), chk['Location'])
|
||||
else:
|
||||
logger.fdebug(module + ' file does not exist in location: ' + pathdir + '. Cannot valid location - some options will not be available for this item.')
|
||||
continue
|
||||
|
@ -2531,12 +2531,12 @@ def updatearc_locs(storyarcid, issues):
|
|||
logger.info('grdst:' + grdst)
|
||||
#send to renamer here if valid.
|
||||
dfilename = chk['Location']
|
||||
if mylar.RENAME_FILES:
|
||||
if mylar.CONFIG.RENAME_FILES:
|
||||
renamed_file = rename_param(arcinfo['ComicID'], arcinfo['ComicName'], arcinfo['IssueNumber'], chk['Location'], issueid=arcinfo['IssueID'], arc=arcinfo['StoryArc'])
|
||||
if renamed_file:
|
||||
dfilename = renamed_file['nfilename']
|
||||
|
||||
if mylar.READ2FILENAME:
|
||||
if mylar.CONFIG.READ2FILENAME:
|
||||
#logger.fdebug('readingorder#: ' + str(arcinfo['ReadingOrder']))
|
||||
#if int(arcinfo['ReadingOrder']) < 10: readord = "00" + str(arcinfo['ReadingOrder'])
|
||||
#elif int(arcinfo['ReadingOrder']) >= 10 and int(arcinfo['ReadingOrder']) <= 99: readord = "0" + str(arcinfo['ReadingOrder'])
|
||||
|
@ -2549,7 +2549,7 @@ def updatearc_locs(storyarcid, issues):
|
|||
logger.fdebug('Destination Path : ' + pathdst)
|
||||
logger.fdebug('Source Path : ' + pathsrc)
|
||||
if not os.path.isfile(pathdst):
|
||||
logger.info('[' + mylar.ARC_FILEOPS.upper() + '] ' + pathsrc + ' into directory : ' + pathdst)
|
||||
logger.info('[' + mylar.CONFIG.ARC_FILEOPS.upper() + '] ' + pathsrc + ' into directory : ' + pathdst)
|
||||
|
||||
try:
|
||||
#need to ensure that src is pointing to the series in order to do a soft/hard-link properly
|
||||
|
@ -2557,7 +2557,7 @@ def updatearc_locs(storyarcid, issues):
|
|||
if not fileoperation:
|
||||
raise OSError
|
||||
except (OSError, IOError):
|
||||
logger.fdebug('[' + mylar.ARC_FILEOPS.upper() + '] Failure ' + pathsrc + ' - check directories and manually re-run.')
|
||||
logger.fdebug('[' + mylar.CONFIG.ARC_FILEOPS.upper() + '] Failure ' + pathsrc + ' - check directories and manually re-run.')
|
||||
continue
|
||||
updateloc = pathdst
|
||||
else:
|
||||
|
@ -2604,7 +2604,7 @@ def arcformat(arc, spanyears, publisher):
|
|||
'$spanyears': spanyears,
|
||||
'$publisher': publisher}
|
||||
|
||||
tmp_folderformat = mylar.ARC_FOLDERFORMAT
|
||||
tmp_folderformat = mylar.CONFIG.ARC_FOLDERFORMAT
|
||||
|
||||
if publisher == 'None':
|
||||
chunk_f_f = re.sub('\$publisher', '', tmp_folderformat)
|
||||
|
@ -2617,22 +2617,22 @@ def arcformat(arc, spanyears, publisher):
|
|||
else:
|
||||
arcpath = replace_all(tmp_folderformat, values)
|
||||
|
||||
if mylar.REPLACE_SPACES:
|
||||
arcpath = arcpath.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
arcpath = arcpath.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
|
||||
if arcpath.startswith('/'):
|
||||
arcpath = arcpath[1:]
|
||||
elif arcpath.startswith('//'):
|
||||
arcpath = arcpath[2:]
|
||||
|
||||
if mylar.STORYARCDIR:
|
||||
logger.info(mylar.DESTINATION_DIR)
|
||||
if mylar.CONFIG.STORYARCDIR:
|
||||
logger.info(mylar.CONFIG.DESTINATION_DIR)
|
||||
logger.info('StoryArcs')
|
||||
logger.info(arcpath)
|
||||
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arcpath)
|
||||
elif mylar.COPY2ARCDIR:
|
||||
logger.warn('Story arc directory is not configured. Defaulting to grabbag directory: ' + mylar.GRABBAG_DIR)
|
||||
dstloc = os.path.join(mylar.GRABBAG_DIR, arcpath)
|
||||
dstloc = os.path.join(mylar.CONFIG.DESTINATION_DIR, 'StoryArcs', arcpath)
|
||||
elif mylar.CONFIG.COPY2ARCDIR:
|
||||
logger.warn('Story arc directory is not configured. Defaulting to grabbag directory: ' + mylar.CONFIG.GRABBAG_DIR)
|
||||
dstloc = os.path.join(mylar.CONFIG.GRABBAG_DIR, arcpath)
|
||||
else:
|
||||
dstloc = None
|
||||
|
||||
|
@ -2672,7 +2672,7 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False):
|
|||
#need to set the connect here as well....
|
||||
import torrent.clients.deluge as delu
|
||||
dp = delu.TorrentClient()
|
||||
if not dp.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD):
|
||||
if not dp.connect(mylar.CONFIG.DELUGE_HOST, mylar.CONFIG.DELUGE_USERNAME, mylar.CONFIG.DELUGE_PASSWORD):
|
||||
logger.warn('Not connected to Deluge!')
|
||||
|
||||
torrent_info = dp.get_torrent(torrent_hash)
|
||||
|
@ -2707,17 +2707,17 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False):
|
|||
|
||||
import shlex, subprocess
|
||||
logger.info('Torrent is completed and status is currently Snatched. Attempting to auto-retrieve.')
|
||||
with open(mylar.AUTO_SNATCH_SCRIPT, 'r') as f:
|
||||
with open(mylar.CONFIG.AUTO_SNATCH_SCRIPT, 'r') as f:
|
||||
first_line = f.readline()
|
||||
|
||||
if mylar.AUTO_SNATCH_SCRIPT.endswith('.sh'):
|
||||
if mylar.CONFIG.AUTO_SNATCH_SCRIPT.endswith('.sh'):
|
||||
shell_cmd = re.sub('#!', '', first_line)
|
||||
if shell_cmd == '' or shell_cmd is None:
|
||||
shell_cmd = '/bin/bash'
|
||||
else:
|
||||
shell_cmd = sys.executable
|
||||
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.AUTO_SNATCH_SCRIPT).decode("string_escape")
|
||||
curScriptName = shell_cmd + ' ' + str(mylar.CONFIG.AUTO_SNATCH_SCRIPT).decode("string_escape")
|
||||
if torrent_files > 1:
|
||||
downlocation = torrent_folder.encode('utf-8')
|
||||
else:
|
||||
|
@ -2824,11 +2824,12 @@ def weekly_info(week=None, year=None):
|
|||
con_startweek = u"" + startweek.strftime(date_fmt).decode('cp1252')
|
||||
con_endweek = u"" + endweek.strftime(date_fmt).decode('cp1252')
|
||||
|
||||
if mylar.WEEKFOLDER_LOC is not None:
|
||||
weekdst = mylar.WEEKFOLDER_LOC
|
||||
if mylar.CONFIG.WEEKFOLDER_LOC is not None:
|
||||
weekdst = mylar.CONFIG.WEEKFOLDER_LOC
|
||||
else:
|
||||
weekdst = mylar.DESTINATION_DIR
|
||||
weekdst = mylar.CONFIG.DESTINATION_DIR
|
||||
|
||||
weekly_last = datetime.datetime.fromtimestamp(mylar.SCHED_WEEKLY_LAST)
|
||||
weekinfo = {'weeknumber': weeknumber,
|
||||
'startweek': con_startweek,
|
||||
'midweek': midweek.strftime('%Y-%m-%d'),
|
||||
|
@ -2839,9 +2840,9 @@ def weekly_info(week=None, year=None):
|
|||
'next_weeknumber': next_week,
|
||||
'next_year': next_year,
|
||||
'current_weeknumber': current_weeknumber,
|
||||
'last_update': mylar.PULL_REFRESH}
|
||||
'last_update': weekly_last.replace(microsecond=0)}
|
||||
|
||||
if mylar.WEEKFOLDER_FORMAT == 0:
|
||||
if mylar.CONFIG.WEEKFOLDER_FORMAT == 0:
|
||||
weekfold = os.path.join(weekdst, str( str(weekinfo['year']) + '-' + str(weeknumber) ))
|
||||
else:
|
||||
weekfold = os.path.join(weekdst, str( str(weekinfo['midweek']) ))
|
||||
|
@ -2892,7 +2893,7 @@ def script_env(mode, vars):
|
|||
#mode = on-snatch, pre-postprocess, post-postprocess
|
||||
#var = dictionary containing variables to pass
|
||||
if mode == 'on-snatch':
|
||||
runscript = mylar.SNATCH_SCRIPT
|
||||
runscript = mylar.CONFIG.SNATCH_SCRIPT
|
||||
if 'torrentinfo' in vars:
|
||||
if 'hash' in vars['torrentinfo']:
|
||||
os.environ['mylar_release_hash'] = vars['torrentinfo']['hash']
|
||||
|
@ -2954,10 +2955,10 @@ def script_env(mode, vars):
|
|||
|
||||
elif mode == 'post-process':
|
||||
#to-do
|
||||
runscript = mylar.EXTRA_SCRIPTS
|
||||
runscript = mylar.CONFIG.EXTRA_SCRIPTS
|
||||
elif mode == 'pre-process':
|
||||
#to-do
|
||||
runscript = mylar.PRE_SCRIPTS
|
||||
runscript = mylar.CONFIG.PRE_SCRIPTS
|
||||
|
||||
logger.fdebug('Initiating ' + mode + ' script detection.')
|
||||
with open(runscript, 'r') as f:
|
||||
|
@ -3092,6 +3093,7 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
|
|||
|
||||
if prev_run_timestamp is not None:
|
||||
prev_run_time_utc = datetime.datetime.utcfromtimestamp(float(prev_run_timestamp))
|
||||
prev_run_time_utc = prev_run_time_utc.replace(microsecond=0)
|
||||
else:
|
||||
prev_run_time_utc = None
|
||||
#logger.fdebug('prev_run_time: %s' % prev_run_timestamp)
|
||||
|
@ -3123,39 +3125,46 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
|
|||
#logger.fdebug('Updating info - status: %s' % status)
|
||||
updateCtrl = {'JobName': job}
|
||||
if current_run is not None:
|
||||
pr_datetime = datetime.datetime.utcfromtimestamp(current_run)
|
||||
pr_datetime = pr_datetime.replace(microsecond=0)
|
||||
updateVals = {'prev_run_timestamp': current_run,
|
||||
'prev_run_datetime': datetime.datetime.utcfromtimestamp(current_run),
|
||||
'prev_run_datetime': pr_datetime,
|
||||
'status': status}
|
||||
#logger.info('updateVals: %s' % updateVals)
|
||||
elif last_run_completed is not None:
|
||||
if job == 'DB Updater':
|
||||
mylar.SCHED.reschedule_job('dbupdater', trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (5 * 60)
|
||||
mylar.SCHED.reschedule_job('dbupdater', trigger=IntervalTrigger(hours=0, minutes=int(mylar.CONFIG.DBUPDATE_INTERVAL), timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (int(mylar.CONFIG.DBUPDATE_INTERVAL) * 60)
|
||||
elif job == 'Auto-Search':
|
||||
mylar.SCHED.reschedule_job('search', trigger=IntervalTrigger(hours=0, minutes=mylar.SEARCH_INTERVAL, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (mylar.SEARCH_INTERVAL * 60)
|
||||
mylar.SCHED.reschedule_job('search', trigger=IntervalTrigger(hours=0, minutes=mylar.CONFIG.SEARCH_INTERVAL, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (mylar.CONFIG.SEARCH_INTERVAL * 60)
|
||||
elif job == 'RSS Feeds':
|
||||
mylar.SCHED.reschedule_job('rss', trigger=IntervalTrigger(hours=0, minutes=int(mylar.RSS_CHECKINTERVAL), timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (int(mylar.RSS_CHECKINTERVAL) * 60)
|
||||
mylar.SCHED.reschedule_job('rss', trigger=IntervalTrigger(hours=0, minutes=int(mylar.CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (int(mylar.CONFIG.RSS_CHECKINTERVAL) * 60)
|
||||
mylar.SCHED_RSS_LAST = last_run_completed
|
||||
elif job == 'Weekly Pullist':
|
||||
if mylar.ALT_PULL == 2:
|
||||
if mylar.CONFIG.ALT_PULL == 2:
|
||||
wkt = 4
|
||||
else:
|
||||
wkt = 24
|
||||
mylar.SCHED.reschedule_job('weekly', trigger=IntervalTrigger(hours=wkt, minutes=mylar.SEARCH_INTERVAL, timezone='UTC'))
|
||||
mylar.SCHED.reschedule_job('weekly', trigger=IntervalTrigger(hours=wkt, minutes=mylar.CONFIG.SEARCH_INTERVAL, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (wkt * 60 * 60)
|
||||
mylar.SCHED_WEEKLY_LAST = last_run_completed
|
||||
elif job == 'Check Version':
|
||||
mylar.SCHED.reschedule_job('version', trigger=IntervalTrigger(hours=0, minutes=mylar.CHECK_GITHUB_INTERVAL, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (mylar.CHECK_GITHUB_INTERVAL * 60)
|
||||
mylar.SCHED.reschedule_job('version', trigger=IntervalTrigger(hours=0, minutes=mylar.CONFIG.CHECK_GITHUB_INTERVAL, timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (mylar.CONFIG.CHECK_GITHUB_INTERVAL * 60)
|
||||
elif job == 'Folder Monitor':
|
||||
mylar.SCHED.reschedule_job('monitor', trigger=IntervalTrigger(hours=0, minutes=int(mylar.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (int(mylar.DOWNLOAD_SCAN_INTERVAL) * 60)
|
||||
mylar.SCHED.reschedule_job('monitor', trigger=IntervalTrigger(hours=0, minutes=int(mylar.CONFIG.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
|
||||
nextrun_stamp = utctimestamp() + (int(mylar.CONFIG.DOWNLOAD_SCAN_INTERVAL) * 60)
|
||||
|
||||
nextrun_date = datetime.datetime.utcfromtimestamp(nextrun_stamp)
|
||||
nextrun_date = nextrun_date.replace(microsecond=0)
|
||||
logger.fdebug('ReScheduled job: %s to %s' % (job, nextrun_date))
|
||||
lastrun_comp = datetime.datetime.utcfromtimestamp(last_run_completed)
|
||||
lastrun_comp = lastrun_comp.replace(microsecond=0)
|
||||
#if it's completed, then update the last run time to the ending time of the job
|
||||
updateVals = {'prev_run_timestamp': last_run_completed,
|
||||
'prev_run_datetime': datetime.datetime.utcfromtimestamp(last_run_completed),
|
||||
'prev_run_datetime': lastrun_comp,
|
||||
'last_run_completed': 'True',
|
||||
'next_run_timestamp': nextrun_stamp,
|
||||
'next_run_datetime': nextrun_date,
|
||||
|
@ -3164,6 +3173,16 @@ def job_management(write=False, job=None, last_run_completed=None, current_run=N
|
|||
#logger.fdebug('Job update for %s: %s' % (updateCtrl, updateVals))
|
||||
myDB.upsert('jobhistory', updateVals, updateCtrl)
|
||||
|
||||
|
||||
def stupidchk():
|
||||
import db
|
||||
myDB = db.DBConnection()
|
||||
CCOMICS = myDB.select("SELECT COUNT(*) FROM comics WHERE Status='Active'")
|
||||
ens = myDB.select("SELECT COUNT(*) FROM comics WHERE Status='Loading' OR Status='Paused'")
|
||||
mylar.COUNT_COMICS = CCOMICS[0][0]
|
||||
mylar.EN_OOMICS = ens[0][0]
|
||||
|
||||
|
||||
def file_ops(path,dst,arc=False,one_off=False):
|
||||
# # path = source path + filename
|
||||
# # dst = destination path + filename
|
||||
|
@ -3175,9 +3194,9 @@ def file_ops(path,dst,arc=False,one_off=False):
|
|||
# #will be either copy / move
|
||||
|
||||
if any([one_off, arc]):
|
||||
action_op = mylar.ARC_FILEOPS
|
||||
action_op = mylar.CONFIG.ARC_FILEOPS
|
||||
else:
|
||||
action_op = mylar.FILE_OPTS
|
||||
action_op = mylar.CONFIG.FILE_OPTS
|
||||
|
||||
if action_op == 'copy' or (arc is True and any([action_op == 'copy', action_op == 'move'])):
|
||||
try:
|
||||
|
@ -3272,7 +3291,7 @@ def file_ops(path,dst,arc=False,one_off=False):
|
|||
|
||||
#option 2
|
||||
import lib.winlink as winlink
|
||||
if mylar.FILE_OPTS == 'hardlink':
|
||||
if mylar.CONFIG.FILE_OPTS == 'hardlink':
|
||||
try:
|
||||
os.system(r'mklink /H dst path')
|
||||
logger.fdebug('Successfully hardlinked file [' + dst + ' --> ' + path + ']')
|
||||
|
@ -3284,7 +3303,7 @@ def file_ops(path,dst,arc=False,one_off=False):
|
|||
except:
|
||||
return False
|
||||
|
||||
elif mylar.FILE_OPTS == 'softlink': #ie. shortcut.
|
||||
elif mylar.CONFIG.FILE_OPTS == 'softlink': #ie. shortcut.
|
||||
try:
|
||||
shutil.move( path, dst )
|
||||
if os.path.lexists( path ):
|
||||
|
|
|
@ -121,7 +121,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
#--Now that we know ComicName, let's try some scraping
|
||||
#--Start
|
||||
# gcd will return issue details (most importantly publishing date)
|
||||
if not mylar.CV_ONLY:
|
||||
if not mylar.CONFIG.CV_ONLY:
|
||||
if mismatch == "no" or mismatch is None:
|
||||
gcdinfo=parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid)
|
||||
#print ("gcdinfo: " + str(gcdinfo))
|
||||
|
@ -152,7 +152,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
CV_NoYearGiven = "no"
|
||||
#if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one.
|
||||
if any([comic['ComicYear'] is None, comic['ComicYear'] == '0000', comic['ComicYear'][-1:] == '-']):
|
||||
if mylar.CV_ONLY:
|
||||
if mylar.CONFIG.CV_ONLY:
|
||||
#we'll defer this until later when we grab all the issues and then figure it out
|
||||
logger.info('Uh-oh. I cannot find a Series Year for this series. I am going to try analyzing deeper.')
|
||||
SeriesYear = cv.getComic(comicid, 'firstissue', comic['FirstIssueID'])
|
||||
|
@ -180,16 +180,16 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
if comic['ComicVersion'].isdigit():
|
||||
comicVol = 'v' + comic['ComicVersion']
|
||||
logger.info('Updated version to :' + str(comicVol))
|
||||
if all([mylar.SETDEFAULTVOLUME is False, comicVol == 'v1']):
|
||||
if all([mylar.CONFIG.SETDEFAULTVOLUME is False, comicVol == 'v1']):
|
||||
comicVol = None
|
||||
else:
|
||||
if mylar.SETDEFAULTVOLUME is True:
|
||||
if mylar.CONFIG.SETDEFAULTVOLUME is True:
|
||||
comicVol = 'v1'
|
||||
else:
|
||||
comicVol = None
|
||||
else:
|
||||
comicVol = oldcomversion
|
||||
if all([mylar.SETDEFAULTVOLUME is True, comicVol is None]):
|
||||
if all([mylar.CONFIG.SETDEFAULTVOLUME is True, comicVol is None]):
|
||||
comicVol = 'v1'
|
||||
|
||||
|
||||
|
@ -212,13 +212,13 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
comicVol = 'None'
|
||||
#if comversion is None, remove it so it doesn't populate with 'None'
|
||||
if comicVol == 'None':
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.FOLDER_FORMAT)
|
||||
chunk_f_f = re.sub('\$VolumeN', '', mylar.CONFIG.FOLDER_FORMAT)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
|
||||
logger.fdebug('No version # found for series, removing from folder format')
|
||||
logger.fdebug("new folder format: " + str(chunk_folder_format))
|
||||
else:
|
||||
chunk_folder_format = mylar.FOLDER_FORMAT
|
||||
chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT
|
||||
|
||||
#do work to generate folder path
|
||||
|
||||
|
@ -232,19 +232,19 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
'$Annual': 'Annual'
|
||||
}
|
||||
|
||||
if mylar.FOLDER_FORMAT == '':
|
||||
comlocation = os.path.join(mylar.DESTINATION_DIR, comicdir, " (" + SeriesYear + ")")
|
||||
if mylar.CONFIG.FOLDER_FORMAT == '':
|
||||
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, comicdir, " (" + SeriesYear + ")")
|
||||
else:
|
||||
comlocation = os.path.join(mylar.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))
|
||||
comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))
|
||||
|
||||
|
||||
#comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
|
||||
if mylar.DESTINATION_DIR == "":
|
||||
#comlocation = mylar.CONFIG.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
|
||||
if mylar.CONFIG.DESTINATION_DIR == "":
|
||||
logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
|
||||
return
|
||||
if mylar.REPLACE_SPACES:
|
||||
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
|
||||
#moved this out of the above loop so it will chk for existance of comlocation in case moved
|
||||
#if it doesn't exist - create it (otherwise will bugger up later on)
|
||||
|
@ -267,34 +267,34 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
#comicIssues = gcdinfo['totalissues']
|
||||
comicIssues = comic['ComicIssues']
|
||||
|
||||
if not mylar.CV_ONLY:
|
||||
if not mylar.CONFIG.CV_ONLY:
|
||||
if gcdinfo['gcdvariation'] == "cv":
|
||||
comicIssues = str(int(comic['ComicIssues']) + 1)
|
||||
|
||||
#let's download the image...
|
||||
if os.path.exists(mylar.CACHE_DIR): pass
|
||||
if os.path.exists(mylar.CONFIG.CACHE_DIR): pass
|
||||
else:
|
||||
#let's make the dir.
|
||||
try:
|
||||
os.makedirs(str(mylar.CACHE_DIR))
|
||||
logger.info('Cache Directory successfully created at: ' + str(mylar.CACHE_DIR))
|
||||
os.makedirs(str(mylar.CONFIG.CACHE_DIR))
|
||||
logger.info('Cache Directory successfully created at: ' + str(mylar.CONFIG.CACHE_DIR))
|
||||
|
||||
except OSError:
|
||||
logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR))
|
||||
logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CONFIG.CACHE_DIR))
|
||||
|
||||
coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg")
|
||||
coverfile = os.path.join(mylar.CONFIG.CACHE_DIR, str(comicid) + ".jpg")
|
||||
|
||||
#if cover has '+' in url it's malformed, we need to replace '+' with '%20' to retreive properly.
|
||||
|
||||
#new CV API restriction - one api request / second.(probably unecessary here, but it doesn't hurt)
|
||||
if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
|
||||
if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
|
||||
time.sleep(2)
|
||||
else:
|
||||
time.sleep(mylar.CVAPI_RATE)
|
||||
time.sleep(mylar.CONFIG.CVAPI_RATE)
|
||||
|
||||
logger.info('Attempting to retrieve the comic image for series')
|
||||
try:
|
||||
r = requests.get(comic['ComicImage'], params=None, stream=True, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
r = requests.get(comic['ComicImage'], params=None, stream=True, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
except Exception, e:
|
||||
logger.warn('Unable to download image from CV URL link: ' + comic['ComicImage'] + ' [Status Code returned: ' + str(r.status_code) + ']')
|
||||
|
||||
|
@ -330,7 +330,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
|
||||
logger.info('Attempting to retrieve alternate comic image for the series.')
|
||||
try:
|
||||
r = requests.get(comic['ComicImageALT'], params=None, stream=True, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
r = requests.get(comic['ComicImageALT'], params=None, stream=True, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
except Exception, e:
|
||||
logger.warn('Unable to download image from CV URL link: ' + comic['ComicImageALT'] + ' [Status Code returned: ' + str(r.status_code) + ']')
|
||||
|
||||
|
@ -355,14 +355,14 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
|
||||
#this is for Firefox when outside the LAN...it works, but I don't know how to implement it
|
||||
#without breaking the normal flow for inside the LAN (above)
|
||||
#ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"
|
||||
#ComicImage = "http://" + str(mylar.CONFIG.HTTP_HOST) + ":" + str(mylar.CONFIG.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"
|
||||
|
||||
#if the comic cover local is checked, save a cover.jpg to the series folder.
|
||||
if mylar.COMIC_COVER_LOCAL:
|
||||
if mylar.CONFIG.COMIC_COVER_LOCAL:
|
||||
try:
|
||||
comiclocal = os.path.join(comlocation, 'cover.jpg')
|
||||
shutil.copyfile(coverfile, comiclocal)
|
||||
if mylar.ENFORCE_PERMS:
|
||||
if mylar.CONFIG.ENFORCE_PERMS:
|
||||
filechecker.setperms(comiclocal)
|
||||
except IOError as e:
|
||||
logger.error('Unable to save cover (' + str(coverfile) + ') into series directory (' + str(comiclocal) + ') at this time.')
|
||||
|
@ -433,8 +433,8 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
if anndata:
|
||||
manualAnnual(annchk=anndata)
|
||||
|
||||
if mylar.CVINFO or (mylar.CV_ONLY and mylar.CVINFO):
|
||||
if not os.path.exists(os.path.join(comlocation, "cvinfo")) or mylar.CV_ONETIMER:
|
||||
if mylar.CONFIG.CVINFO or (mylar.CONFIG.CV_ONLY and mylar.CONFIG.CVINFO):
|
||||
if not os.path.exists(os.path.join(comlocation, "cvinfo")) or mylar.CONFIG.CV_ONETIMER:
|
||||
with open(os.path.join(comlocation, "cvinfo"), "w") as text_file:
|
||||
text_file.write(str(comic['ComicURL']))
|
||||
|
||||
|
@ -467,7 +467,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
if imported is None or imported == 'None' or imported == 'futurecheck':
|
||||
pass
|
||||
else:
|
||||
if mylar.IMP_MOVE:
|
||||
if mylar.CONFIG.IMP_MOVE:
|
||||
logger.info('Mass import - Move files')
|
||||
moveit.movefiles(comicid, comlocation, imported)
|
||||
else:
|
||||
|
@ -487,11 +487,11 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
if pullupd is None:
|
||||
# lets' check the pullist for anything at this time as well since we're here.
|
||||
# do this for only Present comics....
|
||||
if mylar.AUTOWANT_UPCOMING and lastpubdate == 'Present' and series_status == 'Active': #and 'Present' in gcdinfo['resultPublished']:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING and lastpubdate == 'Present' and series_status == 'Active': #and 'Present' in gcdinfo['resultPublished']:
|
||||
logger.fdebug('latestissue: #' + str(latestiss))
|
||||
chkstats = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [comicid, helpers.issuedigits(latestiss)]).fetchone()
|
||||
if chkstats is None:
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
chkstats = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? AND Int_IssueNumber=?", [comicid, helpers.issuedigits(latestiss)]).fetchone()
|
||||
|
||||
if chkstats:
|
||||
|
@ -513,7 +513,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
|
|||
'Issue_Number': issr['Issue_Number'],
|
||||
'Status': issr['Status']
|
||||
})
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
an_results = myDB.select("SELECT * FROM annuals WHERE ComicID=? AND Status='Wanted'", [comicid])
|
||||
if an_results:
|
||||
for ar in an_results:
|
||||
|
@ -684,18 +684,18 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
'$Volume': year
|
||||
}
|
||||
|
||||
if mylar.FOLDER_FORMAT == '':
|
||||
comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
|
||||
if mylar.CONFIG.FOLDER_FORMAT == '':
|
||||
comlocation = mylar.CONFIG.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
|
||||
else:
|
||||
comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)
|
||||
comlocation = mylar.CONFIG.DESTINATION_DIR + "/" + helpers.replace_all(mylar.CONFIG.FOLDER_FORMAT, values)
|
||||
|
||||
#comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
|
||||
if mylar.DESTINATION_DIR == "":
|
||||
#comlocation = mylar.CONFIG.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
|
||||
if mylar.CONFIG.DESTINATION_DIR == "":
|
||||
logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
|
||||
return
|
||||
if mylar.REPLACE_SPACES:
|
||||
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)
|
||||
if mylar.CONFIG.REPLACE_SPACES:
|
||||
#mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
|
||||
comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)
|
||||
|
||||
#if it doesn't exist - create it (otherwise will bugger up later on)
|
||||
if os.path.isdir(comlocation):
|
||||
|
@ -715,23 +715,23 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
comicIssues = gcdinfo['totalissues']
|
||||
|
||||
#let's download the image...
|
||||
if os.path.exists(mylar.CACHE_DIR): pass
|
||||
if os.path.exists(mylar.CONFIG.CACHE_DIR): pass
|
||||
else:
|
||||
#let's make the dir.
|
||||
try:
|
||||
os.makedirs(str(mylar.CACHE_DIR))
|
||||
logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))
|
||||
os.makedirs(str(mylar.CONFIG.CACHE_DIR))
|
||||
logger.info(u"Cache Directory successfully created at: " + str(mylar.CONFIG.CACHE_DIR))
|
||||
|
||||
except OSError:
|
||||
logger.error(u"Could not create cache dir : " + str(mylar.CACHE_DIR))
|
||||
logger.error(u"Could not create cache dir : " + str(mylar.CONFIG.CACHE_DIR))
|
||||
|
||||
coverfile = os.path.join(mylar.CACHE_DIR, str(gcomicid) + ".jpg")
|
||||
coverfile = os.path.join(mylar.CONFIG.CACHE_DIR, str(gcomicid) + ".jpg")
|
||||
|
||||
#new CV API restriction - one api request / second.
|
||||
if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
|
||||
if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
|
||||
time.sleep(2)
|
||||
else:
|
||||
time.sleep(mylar.CVAPI_RATE)
|
||||
time.sleep(mylar.CONFIG.CVAPI_RATE)
|
||||
|
||||
urllib.urlretrieve(str(ComicImage), str(coverfile))
|
||||
try:
|
||||
|
@ -740,11 +740,11 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
|
||||
#this is for Firefox when outside the LAN...it works, but I don't know how to implement it
|
||||
#without breaking the normal flow for inside the LAN (above)
|
||||
#ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comi$
|
||||
#ComicImage = "http://" + str(mylar.CONFIG.HTTP_HOST) + ":" + str(mylar.CONFIG.HTTP_PORT) + "/cache/" + str(comi$
|
||||
|
||||
logger.info(u"Sucessfully retrieved cover for " + ComicName)
|
||||
#if the comic cover local is checked, save a cover.jpg to the series folder.
|
||||
if mylar.COMIC_COVER_LOCAL:
|
||||
if mylar.CONFIG.COMIC_COVER_LOCAL:
|
||||
comiclocal = os.path.join(comlocation + "/cover.jpg")
|
||||
shutil.copy(ComicImage, comiclocal)
|
||||
except IOError as e:
|
||||
|
@ -859,9 +859,9 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
#print ("issueid:" + str(controlValueDict))
|
||||
#print ("values:" + str(newValueDict))
|
||||
|
||||
if mylar.AUTOWANT_ALL:
|
||||
if mylar.CONFIG.AUTOWANT_ALL:
|
||||
newValueDict['Status'] = "Wanted"
|
||||
elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
|
||||
elif issdate > helpers.today() and mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
newValueDict['Status'] = "Wanted"
|
||||
else:
|
||||
newValueDict['Status'] = "Skipped"
|
||||
|
@ -890,7 +890,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
|
||||
myDB.upsert("comics", newValueStat, controlValueStat)
|
||||
|
||||
if mylar.CVINFO:
|
||||
if mylar.CONFIG.CVINFO:
|
||||
if not os.path.exists(comlocation + "/cvinfo"):
|
||||
with open(comlocation + "/cvinfo", "w") as text_file:
|
||||
text_file.write("http://comicvine.gamespot.com/volume/49-" + str(comicid))
|
||||
|
@ -901,7 +901,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
if imported is None or imported == 'None':
|
||||
pass
|
||||
else:
|
||||
if mylar.IMP_MOVE:
|
||||
if mylar.CONFIG.IMP_MOVE:
|
||||
logger.info("Mass import - Move files")
|
||||
moveit.movefiles(gcomicid, comlocation, ogcname)
|
||||
else:
|
||||
|
@ -914,7 +914,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
|
||||
if pullupd is None:
|
||||
# lets' check the pullist for anyting at this time as well since we're here.
|
||||
if mylar.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
|
||||
logger.info(u"Checking this week's pullist for new issues of " + ComicName)
|
||||
updater.newpullcheck(comic['ComicName'], gcomicid)
|
||||
|
||||
|
@ -926,7 +926,7 @@ def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
|
|||
|
||||
for result in results:
|
||||
foundNZB = "none"
|
||||
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
|
||||
if (mylar.CONFIG.NZBSU or mylar.CONFIG.DOGNZB or mylar.CONFIG.EXPERIMENTAL or mylar.CONFIG.NEWZNAB) and (mylar.CONFIG.SAB_HOST):
|
||||
foundNZB = search.searchforissue(result['IssueID'])
|
||||
if foundNZB == "yes":
|
||||
updater.foundsearch(result['ComicID'], result['IssueID'])
|
||||
|
@ -971,27 +971,27 @@ def issue_collection(issuedata, nostatus):
|
|||
# Only change the status & add DateAdded if the issue is already in the database
|
||||
if iss_exists is None:
|
||||
newValueDict['DateAdded'] = helpers.today()
|
||||
if issue['ReleaseDate'] == '00000000':
|
||||
if issue['ReleaseDate'] == '0000-00-00':
|
||||
dk = re.sub('-', '', issue['IssueDate']).strip()
|
||||
else:
|
||||
dk = re.sub('-', '', issue['ReleaseDate']).strip() # converts date to 20140718 format
|
||||
if dk == '00000000':
|
||||
if dk == '0000-00-00':
|
||||
logger.warn('Issue Data is invalid for Issue Number %s. Marking this issue as Skipped' % issue['Issue_Number'])
|
||||
newValueDict['Status'] = "Skipped"
|
||||
else:
|
||||
datechk = datetime.datetime.strptime(dk, "%Y%m%d")
|
||||
issue_week = datetime.datetime.strftime(datechk, "%Y%U")
|
||||
if mylar.AUTOWANT_ALL:
|
||||
if mylar.CONFIG.AUTOWANT_ALL:
|
||||
newValueDict['Status'] = "Wanted"
|
||||
#logger.fdebug('autowant all')
|
||||
elif issue_week >= now_week and mylar.AUTOWANT_UPCOMING:
|
||||
elif issue_week >= now_week and mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
#logger.fdebug(str(datechk) + ' >= ' + str(nowtime))
|
||||
newValueDict['Status'] = "Wanted"
|
||||
else:
|
||||
newValueDict['Status'] = "Skipped"
|
||||
#logger.fdebug('status is : ' + str(newValueDict))
|
||||
else:
|
||||
logger.fdebug('Existing status for issue #%s : %s' % (issue['Issue_Number'], iss_exists['Status']))
|
||||
#logger.fdebug('Existing status for issue #%s : %s' % (issue['Issue_Number'], iss_exists['Status']))
|
||||
if any([iss_exists['Status'] is None, iss_exists['Status'] == 'None']):
|
||||
is_status = 'Skipped'
|
||||
else:
|
||||
|
@ -1509,9 +1509,9 @@ def annual_check(ComicName, SeriesYear, comicid, issuetype, issuechk, annualslis
|
|||
datechk = datetime.datetime.strptime(dk, "%Y%m%d")
|
||||
issue_week = datetime.datetime.strftime(datechk, "%Y%U")
|
||||
|
||||
if mylar.AUTOWANT_ALL:
|
||||
if mylar.CONFIG.AUTOWANT_ALL:
|
||||
astatus = "Wanted"
|
||||
elif issue_week >= now_week and mylar.AUTOWANT_UPCOMING:
|
||||
elif issue_week >= now_week and mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
astatus = "Wanted"
|
||||
else:
|
||||
astatus = "Skipped"
|
||||
|
|
|
@ -32,7 +32,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
|
|||
return
|
||||
|
||||
if not dir:
|
||||
dir = mylar.COMIC_DIR
|
||||
dir = mylar.CONFIG.COMIC_DIR
|
||||
|
||||
# If we're appending a dir, it's coming from the post processor which is
|
||||
# already bytestring
|
||||
|
@ -266,7 +266,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
|
|||
# don't scan in it again if it's already been done initially
|
||||
# continue
|
||||
|
||||
if mylar.IMP_METADATA:
|
||||
if mylar.CONFIG.IMP_METADATA:
|
||||
#if read tags is enabled during import, check here.
|
||||
if i['ComicLocation'].endswith('.cbz'):
|
||||
logger.fdebug('[IMPORT-CBZ] Metatagging checking enabled.')
|
||||
|
@ -517,7 +517,7 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
|
|||
comicids = []
|
||||
|
||||
if watchfound > 0:
|
||||
if mylar.IMP_MOVE:
|
||||
if mylar.CONFIG.IMP_MOVE:
|
||||
logger.info('You checked off Move Files...so that\'s what I am going to do')
|
||||
#check to see if Move Files is enabled.
|
||||
#if not being moved, set the archive bit.
|
||||
|
@ -535,8 +535,8 @@ def libraryScan(dir=None, append=False, ComicID=None, ComicName=None, cron=None,
|
|||
logger.fdebug('Orig. Location: ' + orig_comlocation)
|
||||
logger.fdebug('Orig. Filename: ' + orig_filename)
|
||||
#before moving check to see if Rename to Mylar structure is enabled.
|
||||
if mylar.IMP_RENAME:
|
||||
logger.fdebug('Renaming files according to configuration details : ' + str(mylar.FILE_FORMAT))
|
||||
if mylar.CONFIG.IMP_RENAME:
|
||||
logger.fdebug('Renaming files according to configuration details : ' + str(mylar.CONFIG.FILE_FORMAT))
|
||||
renameit = helpers.rename_param(watch_comicid, watch_comicname, watch_comicyear, watch_comiciss)
|
||||
nfilename = renameit['nfilename']
|
||||
|
||||
|
|
|
@ -80,12 +80,14 @@ def locg(pulldate=None,weeknumber=None,year=None):
|
|||
'comicid': x['comicid'],
|
||||
'issueid': x['issueid'],
|
||||
'weeknumber': x['weeknumber'],
|
||||
'year': x['year']})
|
||||
'year': x['year'],
|
||||
'volume': x['volume'],
|
||||
'seriesyear': x['seriesyear']})
|
||||
shipdate = x['shipdate']
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text, IssueID text, CV_Last_Update text, DynamicName text, weeknumber text, year text, rowid INTEGER PRIMARY KEY)")
|
||||
myDB.action("CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, ComicID text, IssueID text, CV_Last_Update text, DynamicName text, weeknumber text, year text, volume text, seriesyear text, rowid INTEGER PRIMARY KEY)")
|
||||
|
||||
#clear out the upcoming table here so they show the new values properly.
|
||||
if pulldate == '00000000':
|
||||
|
@ -117,12 +119,14 @@ def locg(pulldate=None,weeknumber=None,year=None):
|
|||
'COMICID': comicid,
|
||||
'ISSUEID': issueid,
|
||||
'WEEKNUMBER': x['weeknumber'],
|
||||
'YEAR': x['year']}
|
||||
'YEAR': x['year'],
|
||||
'VOLUME': x['volume'],
|
||||
'SERIESYEAR': x['seriesyear']}
|
||||
myDB.upsert("weekly", newValueDict, controlValueDict)
|
||||
|
||||
logger.info('[PULL-LIST] Successfully populated pull-list into Mylar for the week of: ' + str(weeknumber))
|
||||
#set the last poll date/time here so that we don't start overwriting stuff too much...
|
||||
mylar.PULL_REFRESH = todaydate
|
||||
mylar.CONFIG.PULL_REFRESH = todaydate
|
||||
|
||||
return {'status': 'success',
|
||||
'count': len(data),
|
||||
|
|
|
@ -27,6 +27,7 @@ from mylar import helpers
|
|||
|
||||
# These settings are for file logging only
|
||||
FILENAME = 'mylar.log'
|
||||
MAX_LOGSIZE = 1000000
|
||||
MAX_FILES = 5
|
||||
|
||||
# Mylar logger
|
||||
|
@ -42,7 +43,7 @@ class LogListHandler(logging.Handler):
|
|||
message = message.replace("\n", "<br />")
|
||||
mylar.LOG_LIST.insert(0, (helpers.now(), message, record.levelname, record.threadName))
|
||||
|
||||
def initLogger(console=False, log_dir=False, verbose=False):
|
||||
def initLogger(console=False, log_dir=False, init=False, verbose=False):
|
||||
#concurrentLogHandler/0.8.7 (to deal with windows locks)
|
||||
#since this only happens on windows boxes, if it's nix/mac use the default logger.
|
||||
if platform.system() == 'Windows':
|
||||
|
@ -60,11 +61,13 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
|||
mylar.LOGTYPE = 'log'
|
||||
from logging.handlers import RotatingFileHandler as RFHandler
|
||||
|
||||
|
||||
if mylar.MAX_LOGSIZE:
|
||||
MAX_SIZE = mylar.MAX_LOGSIZE
|
||||
if init is True:
|
||||
max_size = 1000000 #1 MB
|
||||
else:
|
||||
MAX_SIZE = 1000000 # 1 MB
|
||||
if mylar.CONFIG.MAX_LOGSIZE:
|
||||
max_size = mylar.CONFIG.MAX_LOGSIZE
|
||||
else:
|
||||
max_size = 1000000 # 1 MB
|
||||
|
||||
"""
|
||||
Setup logging for Mylar. It uses the logger instance with the name
|
||||
|
@ -75,6 +78,12 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
|||
* StreamHandler: for console
|
||||
"""
|
||||
|
||||
logging.getLogger('apscheduler.scheduler').setLevel(logging.WARN)
|
||||
logging.getLogger('apscheduler.threadpool').setLevel(logging.WARN)
|
||||
logging.getLogger('apscheduler.scheduler').propagate = False
|
||||
logging.getLogger('apscheduler.threadpool').propagate = False
|
||||
|
||||
|
||||
# Close and remove old handlers. This is required to reinit the loggers
|
||||
# at runtime
|
||||
for handler in logger.handlers[:]:
|
||||
|
@ -88,14 +97,17 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
|||
|
||||
# Configure the logger to accept all messages
|
||||
logger.propagate = False
|
||||
|
||||
|
||||
#1 is WARN level, 2 is ERROR
|
||||
if mylar.LOG_LEVEL == '1':
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.WARN)
|
||||
elif mylar.LOG_LEVEL == '2':
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.ERROR)
|
||||
else:
|
||||
if init is True:
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.INFO)
|
||||
else:
|
||||
if mylar.CONFIG.LOG_LEVEL == '1':
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.WARN)
|
||||
elif mylar.CONFIG.LOG_LEVEL == '2':
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.ERROR)
|
||||
else:
|
||||
logger.setLevel(logging.DEBUG if verbose else logging.INFO)
|
||||
|
||||
# Add list logger
|
||||
loglist_handler = LogListHandler()
|
||||
|
@ -104,9 +116,9 @@ def initLogger(console=False, log_dir=False, verbose=False):
|
|||
|
||||
# Setup file logger
|
||||
if log_dir:
|
||||
filename = os.path.join(mylar.LOG_DIR, FILENAME)
|
||||
filename = os.path.join(log_dir, FILENAME)
|
||||
file_formatter = Formatter('%(asctime)s - %(levelname)-7s :: %(threadName)s : %(message)s', '%d-%b-%Y %H:%M:%S')
|
||||
file_handler = RFHandler(filename, "a", maxBytes=MAX_SIZE, backupCount=MAX_FILES)
|
||||
file_handler = RFHandler(filename, "a", maxBytes=max_size, backupCount=MAX_FILES)
|
||||
file_handler.setLevel(logging.DEBUG)
|
||||
file_handler.setFormatter(file_formatter)
|
||||
|
||||
|
|
30
mylar/mb.py
30
mylar/mb.py
|
@ -61,16 +61,16 @@ def pullsearch(comicapi, comicquery, offset, explicit, type):
|
|||
#logger.info('MB.PULLURL:' + PULLURL)
|
||||
|
||||
#new CV API restriction - one api request / second.
|
||||
if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
|
||||
if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
|
||||
time.sleep(2)
|
||||
else:
|
||||
time.sleep(mylar.CVAPI_RATE)
|
||||
time.sleep(mylar.CONFIG.CVAPI_RATE)
|
||||
|
||||
#download the file:
|
||||
payload = None
|
||||
|
||||
try:
|
||||
r = requests.get(PULLURL, params=payload, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
r = requests.get(PULLURL, params=payload, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
except Exception, e:
|
||||
logger.warn('Error fetching data from ComicVine: %s' % (e))
|
||||
return
|
||||
|
@ -121,11 +121,11 @@ def findComic(name, mode, issue, limityear=None, explicit=None, type=None):
|
|||
explicit = 'all'
|
||||
|
||||
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
|
||||
comicapi = mylar.DEFAULT_CVAPI
|
||||
if mylar.CONFIG.COMICVINE_API == 'None' or mylar.CONFIG.COMICVINE_API is None:
|
||||
logger.warn('You have not specified your own ComicVine API key - this is a requirement. Get your own @ http://api.comicvine.com.')
|
||||
return
|
||||
else:
|
||||
comicapi = mylar.COMICVINE_API
|
||||
comicapi = mylar.CONFIG.COMICVINE_API
|
||||
|
||||
if type is None:
|
||||
type = 'volume'
|
||||
|
@ -330,7 +330,7 @@ def findComic(name, mode, issue, limityear=None, explicit=None, type=None):
|
|||
xmlpub = "Unknown"
|
||||
|
||||
#ignore specific publishers on a global scale here.
|
||||
if mylar.BLACKLISTED_PUBLISHERS is not None and any([x for x in mylar.BLACKLISTED_PUBLISHERS if x.lower() == xmlpub.lower()]):
|
||||
if mylar.CONFIG.BLACKLISTED_PUBLISHERS is not None and any([x for x in mylar.CONFIG.BLACKLISTED_PUBLISHERS if x.lower() == xmlpub.lower()]):
|
||||
# #'panini' in xmlpub.lower() or 'deagostini' in xmlpub.lower() or 'Editorial Televisa' in xmlpub.lower():
|
||||
logger.fdebug('Blacklisted publisher [' + xmlpub + ']. Ignoring this result.')
|
||||
continue
|
||||
|
@ -396,27 +396,27 @@ def storyarcinfo(xmlid):
|
|||
|
||||
arcinfo = {}
|
||||
|
||||
if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
|
||||
comicapi = mylar.DEFAULT_CVAPI
|
||||
if mylar.CONFIG.COMICVINE_API == 'None' or mylar.CONFIG.COMICVINE_API is None:
|
||||
logger.warn('You have not specified your own ComicVine API key - this is a requirement. Get your own @ http://api.comicvine.com.')
|
||||
return
|
||||
else:
|
||||
comicapi = mylar.COMICVINE_API
|
||||
comicapi = mylar.CONFIG.COMICVINE_API
|
||||
|
||||
#respawn to the exact id for the story arc and count the # of issues present.
|
||||
ARCPULL_URL = mylar.CVURL + 'story_arc/4045-' + str(xmlid) + '/?api_key=' + str(comicapi) + '&field_list=issues,publisher,name,first_appeared_in_issue,deck,image&format=xml&offset=0'
|
||||
#logger.fdebug('arcpull_url:' + str(ARCPULL_URL))
|
||||
|
||||
#new CV API restriction - one api request / second.
|
||||
if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
|
||||
if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
|
||||
time.sleep(2)
|
||||
else:
|
||||
time.sleep(mylar.CVAPI_RATE)
|
||||
time.sleep(mylar.CONFIG.CVAPI_RATE)
|
||||
|
||||
#download the file:
|
||||
payload = None
|
||||
|
||||
try:
|
||||
r = requests.get(ARCPULL_URL, params=payload, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
r = requests.get(ARCPULL_URL, params=payload, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
|
||||
except Exception, e:
|
||||
logger.warn('Error fetching data from ComicVine: %s' % (e))
|
||||
return
|
||||
|
|
|
@ -25,8 +25,8 @@ def movefiles(comicid, comlocation, imported):
|
|||
srcimp = impr['comiclocation']
|
||||
orig_filename = impr['comicfilename']
|
||||
#before moving check to see if Rename to Mylar structure is enabled.
|
||||
if mylar.IMP_RENAME and mylar.FILE_FORMAT != '':
|
||||
logger.fdebug("Renaming files according to configuration details : " + str(mylar.FILE_FORMAT))
|
||||
if mylar.CONFIG.IMP_RENAME and mylar.CONFIG.FILE_FORMAT != '':
|
||||
logger.fdebug("Renaming files according to configuration details : " + str(mylar.CONFIG.FILE_FORMAT))
|
||||
renameit = helpers.rename_param(comicid, imported['ComicName'], impr['issuenumber'], orig_filename)
|
||||
nfilename = renameit['nfilename']
|
||||
dstimp = os.path.join(comlocation, nfilename)
|
||||
|
|
|
@ -36,16 +36,16 @@ class PROWL:
|
|||
priority = []
|
||||
|
||||
def __init__(self):
|
||||
self.enabled = mylar.PROWL_ENABLED
|
||||
self.keys = mylar.PROWL_KEYS
|
||||
self.priority = mylar.PROWL_PRIORITY
|
||||
self.enabled = mylar.CONFIG.PROWL_ENABLED
|
||||
self.keys = mylar.CONFIG.PROWL_KEYS
|
||||
self.priority = mylar.CONFIG.PROWL_PRIORITY
|
||||
pass
|
||||
|
||||
def conf(self, options):
|
||||
return cherrypy.config['config'].get('Prowl', options)
|
||||
|
||||
def notify(self, message, event, module=None):
|
||||
if not mylar.PROWL_ENABLED:
|
||||
if not mylar.CONFIG.PROWL_ENABLED:
|
||||
return
|
||||
|
||||
if module is None:
|
||||
|
@ -54,11 +54,11 @@ class PROWL:
|
|||
|
||||
http_handler = HTTPSConnection("api.prowlapp.com")
|
||||
|
||||
data = {'apikey': mylar.PROWL_KEYS,
|
||||
data = {'apikey': mylar.CONFIG.PROWL_KEYS,
|
||||
'application': 'Mylar',
|
||||
'event': event,
|
||||
'description': message.encode("utf-8"),
|
||||
'priority': mylar.PROWL_PRIORITY}
|
||||
'priority': mylar.CONFIG.PROWL_PRIORITY}
|
||||
|
||||
http_handler.request("POST",
|
||||
"/publicapi/add",
|
||||
|
@ -87,12 +87,12 @@ class NMA:
|
|||
self.NMA_URL = "https://www.notifymyandroid.com/publicapi/notify"
|
||||
self.TEST_NMA_URL = "https://www.notifymyandroid.com/publicapi/verify"
|
||||
if test_apikey is None:
|
||||
self.apikey = mylar.NMA_APIKEY
|
||||
self.apikey = mylar.CONFIG.NMA_APIKEY
|
||||
self.test = False
|
||||
else:
|
||||
self.apikey = test_apikey
|
||||
self.test = True
|
||||
self.priority = mylar.NMA_PRIORITY
|
||||
self.priority = mylar.CONFIG.NMA_PRIORITY
|
||||
|
||||
self._session = requests.Session()
|
||||
|
||||
|
@ -207,27 +207,27 @@ class PUSHOVER:
|
|||
|
||||
def __init__(self, test_apikey=None, test_userkey=None):
|
||||
self.PUSHOVER_URL = 'https://api.pushover.net/1/messages.json'
|
||||
self.enabled = mylar.PUSHOVER_ENABLED
|
||||
self.enabled = mylar.CONFIG.PUSHOVER_ENABLED
|
||||
if test_apikey is None:
|
||||
if mylar.PUSHOVER_APIKEY is None or mylar.PUSHOVER_APIKEY == 'None':
|
||||
if mylar.CONFIG.PUSHOVER_APIKEY is None or mylar.CONFIG.PUSHOVER_APIKEY == 'None':
|
||||
self.apikey = 'a1KZ1L7d8JKdrtHcUR6eFoW2XGBmwG'
|
||||
else:
|
||||
self.apikey = mylar.PUSHOVER_APIKEY
|
||||
self.apikey = mylar.CONFIG.PUSHOVER_APIKEY
|
||||
else:
|
||||
self.apikey = test_apikey
|
||||
|
||||
if test_userkey is None:
|
||||
self.userkey = mylar.PUSHOVER_USERKEY
|
||||
self.userkey = mylar.CONFIG.PUSHOVER_USERKEY
|
||||
else:
|
||||
self.userkey = test_userkey
|
||||
|
||||
self.priority = mylar.PUSHOVER_PRIORITY
|
||||
self.priority = mylar.CONFIG.PUSHOVER_PRIORITY
|
||||
|
||||
self._session = requests.Session()
|
||||
self._session.headers = {'Content-type': "application/x-www-form-urlencoded"}
|
||||
|
||||
def notify(self, event, message=None, snatched_nzb=None, prov=None, sent_to=None, module=None):
|
||||
if not mylar.PUSHOVER_ENABLED:
|
||||
if not mylar.CONFIG.PUSHOVER_ENABLED:
|
||||
return
|
||||
if module is None:
|
||||
module = ''
|
||||
|
@ -238,11 +238,11 @@ class PUSHOVER:
|
|||
snatched_nzb = snatched_nzb[:-1]
|
||||
message = "Mylar has snatched: " + snatched_nzb + " from " + prov + " and has sent it to " + sent_to
|
||||
|
||||
data = {'token': mylar.PUSHOVER_APIKEY,
|
||||
'user': mylar.PUSHOVER_USERKEY,
|
||||
data = {'token': mylar.CONFIG.PUSHOVER_APIKEY,
|
||||
'user': mylar.CONFIG.PUSHOVER_USERKEY,
|
||||
'message': message.encode("utf-8"),
|
||||
'title': event,
|
||||
'priority': mylar.PUSHOVER_PRIORITY}
|
||||
'priority': mylar.CONFIG.PUSHOVER_PRIORITY}
|
||||
|
||||
r = self._session.post(self.PUSHOVER_URL, data=data, verify=True)
|
||||
|
||||
|
@ -280,7 +280,7 @@ class BOXCAR:
|
|||
try:
|
||||
|
||||
data = urllib.urlencode({
|
||||
'user_credentials': mylar.BOXCAR_TOKEN,
|
||||
'user_credentials': mylar.CONFIG.BOXCAR_TOKEN,
|
||||
'notification[title]': title.encode('utf-8').strip(),
|
||||
'notification[long_message]': msg.encode('utf-8'),
|
||||
'notification[sound]': "done"
|
||||
|
@ -318,7 +318,7 @@ class BOXCAR:
|
|||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
if not mylar.BOXCAR_ENABLED and not force:
|
||||
if not mylar.CONFIG.BOXCAR_ENABLED and not force:
|
||||
logger.fdebug(module + ' Notification for Boxcar not enabled, skipping this notification.')
|
||||
return False
|
||||
|
||||
|
@ -343,11 +343,11 @@ class PUSHBULLET:
|
|||
def __init__(self, test_apikey=None):
|
||||
self.PUSH_URL = "https://api.pushbullet.com/v2/pushes"
|
||||
if test_apikey is None:
|
||||
self.apikey = mylar.PUSHBULLET_APIKEY
|
||||
self.apikey = mylar.CONFIG.PUSHBULLET_APIKEY
|
||||
else:
|
||||
self.apikey = test_apikey
|
||||
self.deviceid = mylar.PUSHBULLET_DEVICEID
|
||||
self.channel_tag = mylar.PUSHBULLET_CHANNEL_TAG
|
||||
self.deviceid = mylar.CONFIG.PUSHBULLET_DEVICEID
|
||||
self.channel_tag = mylar.CONFIG.PUSHBULLET_CHANNEL_TAG
|
||||
self._json_header = {'Content-Type': 'application/json',
|
||||
'Authorization': 'Basic %s' % base64.b64encode(self.apikey + ":")}
|
||||
self._session = requests.Session()
|
||||
|
@ -415,16 +415,16 @@ class TELEGRAM:
|
|||
def __init__(self, test_userid=None, test_token=None):
|
||||
self.TELEGRAM_API = "https://api.telegram.org/bot%s/%s"
|
||||
if test_userid is None:
|
||||
self.userid = mylar.TELEGRAM_USERID
|
||||
self.userid = mylar.CONFIG.TELEGRAM_USERID
|
||||
else:
|
||||
self.userid = test_userid
|
||||
if test_token is None:
|
||||
self.token = mylar.TELEGRAM_TOKEN
|
||||
self.token = mylar.CONFIG.TELEGRAM_TOKEN
|
||||
else:
|
||||
self.token = test_token
|
||||
|
||||
def notify(self, message, status):
|
||||
if not mylar.TELEGRAM_ENABLED:
|
||||
if not mylar.CONFIG.TELEGRAM_ENABLED:
|
||||
return
|
||||
|
||||
# Construct message
|
||||
|
@ -450,7 +450,7 @@ class TELEGRAM:
|
|||
|
||||
class SLACK:
|
||||
def __init__(self, test_webhook_url=None):
|
||||
self.webhook_url = mylar.SLACK_WEBHOOK_URL if test_webhook_url is None else test_webhook_url
|
||||
self.webhook_url = mylar.CONFIG.SLACK_WEBHOOK_URL if test_webhook_url is None else test_webhook_url
|
||||
|
||||
def notify(self, text, attachment_text, module=None):
|
||||
if module is None:
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
# This file is part of Mylar.
|
||||
#
|
||||
# Mylar is free software: you can redistribute it and/or modify
|
||||
|
@ -59,8 +58,8 @@ class Readinglist(object):
|
|||
logger.info(self.module + ' Issue not located on your current watchlist. I should probably check story-arcs but I do not have that capability just yet.')
|
||||
else:
|
||||
locpath = None
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation'])) != comicinfo['ComicLocation']:
|
||||
pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation']))
|
||||
if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation'])) != comicinfo['ComicLocation']:
|
||||
pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation']))
|
||||
if os.path.exists(os.path.join(pathdir, readlist['Location'])):
|
||||
locpath = os.path.join(pathdir, readlist['Location'])
|
||||
else:
|
||||
|
@ -75,7 +74,7 @@ class Readinglist(object):
|
|||
comicname = comicinfo['ComicName']
|
||||
dspinfo = comicname + ' #' + comicissue
|
||||
if annualize:
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
comicissue = 'Annual ' + readlist['Issue_Number']
|
||||
dspinfo = comicname + ' Annual #' + readlist['Issue_Number']
|
||||
else:
|
||||
|
@ -169,11 +168,11 @@ class Readinglist(object):
|
|||
# comiclocation = cid['ComicLocation']
|
||||
# comicid = cid['ComicID']
|
||||
|
||||
# if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation)) != comiclocation:
|
||||
# logger.fdebug(module + ' Multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS)
|
||||
# if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation)) != comiclocation:
|
||||
# logger.fdebug(module + ' Multiple_dest_dirs:' + mylar.CONFIG.MULTIPLE_DEST_DIRS)
|
||||
# logger.fdebug(module + ' Dir: ' + comiclocation)
|
||||
# logger.fdebug(module + ' Os.path.basename: ' + os.path.basename(comiclocation))
|
||||
# pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation))
|
||||
# pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(comiclocation))
|
||||
if os.path.exists(clist['filepath']):
|
||||
sendlist.append({"issueid": clist['issueid'],
|
||||
"filepath": clist['filepath'],
|
||||
|
@ -207,8 +206,8 @@ class Readinglist(object):
|
|||
import shlex
|
||||
import subprocess
|
||||
|
||||
#fhost = mylar.TAB_HOST.find(':')
|
||||
host = mylar.TAB_HOST[:mylar.TAB_HOST.find(':')]
|
||||
#fhost = mylar.CONFIG.TAB_HOST.find(':')
|
||||
host = mylar.CONFIG.TAB_HOST[:mylar.CONFIG.TAB_HOST.find(':')]
|
||||
|
||||
if 'windows' not in mylar.OS_DETECT.lower():
|
||||
cmdstring = str('ping -c1 ' + str(host))
|
||||
|
|
|
@ -46,14 +46,14 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
|
|||
if issue:
|
||||
srchterm += '%20' + str(issue)
|
||||
|
||||
if mylar.TPSE_PROXY:
|
||||
if mylar.TPSE_PROXY.endswith('/'):
|
||||
tpse_url = mylar.TPSE_PROXY
|
||||
if mylar.CONFIG.TPSE_PROXY:
|
||||
if mylar.CONFIG.TPSE_PROXY.endswith('/'):
|
||||
tpse_url = mylar.CONFIG.TPSE_PROXY
|
||||
else:
|
||||
tpse_url = mylar.TPSE_PROXY + '/'
|
||||
tpse_url = mylar.CONFIG.TPSE_PROXY + '/'
|
||||
else:
|
||||
#switched to https.
|
||||
tpse_url = mylar.TPSEURL
|
||||
tpse_url = mylar.CONFIG.TPSEURL
|
||||
|
||||
#this is for the public trackers included thus far in order to properly cycle throught the correct ones depending on the search request
|
||||
# TPSE = search only
|
||||
|
@ -90,34 +90,34 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
|
|||
|
||||
feedtype = None
|
||||
|
||||
if pickfeed == "1" and mylar.ENABLE_32P: # 32pages new releases feed.
|
||||
if pickfeed == "1" and mylar.CONFIG.ENABLE_32P: # 32pages new releases feed.
|
||||
feed = 'https://32pag.es/feeds.php?feed=torrents_all&user=' + feedinfo['user'] + '&auth=' + feedinfo['auth'] + '&passkey=' + feedinfo['passkey'] + '&authkey=' + feedinfo['authkey']
|
||||
feedtype = ' from the New Releases RSS Feed for comics'
|
||||
verify = bool(mylar.VERIFY_32P)
|
||||
verify = bool(mylar.CONFIG.VERIFY_32P)
|
||||
elif pickfeed == "2" and srchterm is not None: # TP.SE search / RSS
|
||||
feed = tpse_url + 'rss/' + str(srchterm) + '/'
|
||||
verify = bool(mylar.TPSE_VERIFY)
|
||||
verify = bool(mylar.CONFIG.TPSE_VERIFY)
|
||||
elif pickfeed == "3": # TP.SE rss feed (3101 = comics category) / non-RSS
|
||||
feed = tpse_url + '?hl=en&safe=off&num=50&start=0&orderby=best&s=&filter=3101'
|
||||
feedtype = ' from the New Releases RSS Feed for comics from TP.SE'
|
||||
verify = bool(mylar.TPSE_VERIFY)
|
||||
verify = bool(mylar.CONFIG.TPSE_VERIFY)
|
||||
elif pickfeed == "4": #32p search
|
||||
if any([mylar.USERNAME_32P is None, mylar.USERNAME_32P == '', mylar.PASSWORD_32P is None, mylar.PASSWORD_32P == '']):
|
||||
if any([mylar.CONFIG.USERNAME_32P is None, mylar.CONFIG.USERNAME_32P == '', mylar.CONFIG.PASSWORD_32P is None, mylar.CONFIG.PASSWORD_32P == '']):
|
||||
logger.error('[RSS] Warning - you NEED to enter in your 32P Username and Password to use this option.')
|
||||
lp=+1
|
||||
continue
|
||||
if mylar.MODE_32P == 0:
|
||||
if mylar.CONFIG.MODE_32P == 0:
|
||||
logger.warn('[32P] Searching is not available in 32p Legacy mode. Switch to Auth mode to use the search functionality.')
|
||||
lp=+1
|
||||
continue
|
||||
return
|
||||
elif pickfeed == "5" and srchterm is not None: # demonoid search / non-RSS
|
||||
feed = mylar.DEMURL + "files/?category=10&subcategory=All&language=0&seeded=2&external=2&query=" + str(srchterm) + "&uid=0&out=rss"
|
||||
verify = bool(mylar.TPSE_VERIFY)
|
||||
verify = bool(mylar.CONFIG.TPSE_VERIFY)
|
||||
elif pickfeed == "6": # demonoid rss feed
|
||||
feed = mylar.DEMURL + 'rss/10.xml'
|
||||
feedtype = ' from the New Releases RSS Feed from Demonoid'
|
||||
verify = bool(mylar.TPSE_VERIFY)
|
||||
verify = bool(mylar.CONFIG.TPSE_VERIFY)
|
||||
elif pickfeed == "999": #WWT rss feed
|
||||
feed = mylar.WWTURL + 'rss.php?cat=132,50'
|
||||
feedtype = ' from the New Releases RSS Feed from WorldWideTorrents'
|
||||
|
@ -126,7 +126,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
|
|||
#get the info here
|
||||
feed = 'https://32pag.es/feeds.php?feed=' + feedinfo['feed'] + '&user=' + feedinfo['user'] + '&auth=' + feedinfo['auth'] + '&passkey=' + feedinfo['passkey'] + '&authkey=' + feedinfo['authkey'] + '&name=' + feedinfo['feedname']
|
||||
feedtype = ' from your Personal Notification Feed : ' + feedinfo['feedname']
|
||||
verify = bool(mylar.VERIFY_32P)
|
||||
verify = bool(mylar.CONFIG.VERIFY_32P)
|
||||
else:
|
||||
logger.error('invalid pickfeed denoted...')
|
||||
return
|
||||
|
@ -177,7 +177,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
|
|||
justdigits = entry['file_size'] #size not available in follow-list rss feed
|
||||
seeddigits = entry['seeders'] #number of seeders not available in follow-list rss feed
|
||||
|
||||
if int(seeddigits) >= int(mylar.MINSEEDS):
|
||||
if int(seeddigits) >= int(mylar.CONFIG.MINSEEDS):
|
||||
torthe32p.append({
|
||||
'site': picksite,
|
||||
'title': entry['torrent_seriesname'].lstrip() + ' ' + entry['torrent_seriesvol'] + ' #' + entry['torrent_seriesiss'],
|
||||
|
@ -338,7 +338,7 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
|
|||
# itd = True
|
||||
|
||||
|
||||
if int(mylar.MINSEEDS) >= int(seeddigits):
|
||||
if int(mylar.CONFIG.MINSEEDS) >= int(seeddigits):
|
||||
#new releases has it as '&id', notification feeds have it as %ampid (possibly even &id
|
||||
link = feedme.entries[i].link
|
||||
link = re.sub('&','&', link)
|
||||
|
@ -403,26 +403,28 @@ def nzbs(provider=None, forcerss=False):
|
|||
|
||||
newznab_hosts = []
|
||||
|
||||
if mylar.NEWZNAB == 1:
|
||||
for newznab_host in mylar.EXTRA_NEWZNABS:
|
||||
logger.info('config.newznab: %s' % mylar.CONFIG.NEWZNAB)
|
||||
logger.info('extra.newznabs: %s' % mylar.CONFIG.EXTRA_NEWZNABS)
|
||||
if mylar.CONFIG.NEWZNAB == 1:
|
||||
for newznab_host in mylar.CONFIG.EXTRA_NEWZNABS:
|
||||
logger.fdebug('[RSS] newznab name: ' + str(newznab_host[0]) + ' - enabled: ' + str(newznab_host[5]))
|
||||
if str(newznab_host[5]) == '1':
|
||||
newznab_hosts.append(newznab_host)
|
||||
|
||||
providercount = len(newznab_hosts) + int(mylar.EXPERIMENTAL == 1) + int(mylar.NZBSU == 1) + int(mylar.DOGNZB == 1)
|
||||
providercount = len(newznab_hosts) + int(mylar.CONFIG.EXPERIMENTAL == 1) + int(mylar.CONFIG.NZBSU == 1) + int(mylar.CONFIG.DOGNZB == 1)
|
||||
logger.fdebug('[RSS] You have enabled ' + str(providercount) + ' NZB RSS search providers.')
|
||||
|
||||
if mylar.EXPERIMENTAL == 1:
|
||||
if mylar.CONFIG.EXPERIMENTAL == 1:
|
||||
max_entries = "250" if forcerss else "50"
|
||||
_parse_feed('experimental', 'http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&max=' + max_entries + '&more=1', False)
|
||||
|
||||
if mylar.NZBSU == 1:
|
||||
if mylar.CONFIG.NZBSU == 1:
|
||||
num_items = "&num=100" if forcerss else "" # default is 25
|
||||
_parse_feed('nzb.su', 'http://api.nzb.su/rss?t=7030&dl=1&i=' + (mylar.NZBSU_UID or '1') + '&r=' + mylar.NZBSU_APIKEY + num_items, bool(mylar.NZBSU_VERIFY))
|
||||
_parse_feed('nzb.su', 'http://api.nzb.su/rss?t=7030&dl=1&i=' + (mylar.CONFIG.NZBSU_UID or '1') + '&r=' + mylar.CONFIG.NZBSU_APIKEY + num_items, bool(mylar.CONFIG.NZBSU_VERIFY))
|
||||
|
||||
if mylar.DOGNZB == 1:
|
||||
if mylar.CONFIG.DOGNZB == 1:
|
||||
num_items = "&num=100" if forcerss else "" # default is 25
|
||||
_parse_feed('dognzb', 'https://dognzb.cr/rss.cfm?r=' + mylar.DOGNZB_APIKEY + '&t=7030' + num_items, bool(mylar.DOGNZB_VERIFY))
|
||||
_parse_feed('dognzb', 'https://dognzb.cr/rss.cfm?r=' + mylar.CONFIG.DOGNZB_APIKEY + '&t=7030' + num_items, bool(mylar.CONFIG.DOGNZB_VERIFY))
|
||||
|
||||
for newznab_host in newznab_hosts:
|
||||
site = newznab_host[0].rstrip()
|
||||
|
@ -463,7 +465,7 @@ def nzbs(provider=None, forcerss=False):
|
|||
|
||||
#Remove the API keys from the url to allow for possible api key changes
|
||||
if site == 'dognzb':
|
||||
link = re.sub(mylar.DOGNZB_APIKEY, '', link).strip()
|
||||
link = re.sub(mylar.CONFIG.DOGNZB_APIKEY, '', link).strip()
|
||||
else:
|
||||
link = link[:link.find('&i=')].strip()
|
||||
|
||||
|
@ -515,7 +517,7 @@ def rssdbupdate(feeddata, i, type):
|
|||
|
||||
myDB.upsert("rssdb", newVal, ctrlVal)
|
||||
|
||||
logger.fdebug('Completed adding new data to RSS DB. Next add in ' + str(mylar.RSS_CHECKINTERVAL) + ' minutes')
|
||||
logger.fdebug('Completed adding new data to RSS DB. Next add in ' + str(mylar.CONFIG.RSS_CHECKINTERVAL) + ' minutes')
|
||||
return
|
||||
|
||||
|
||||
|
@ -539,11 +541,11 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False)
|
|||
tsearch_rem2 = re.sub("\\bthe\\b", "%", tsearch_rem1.lower())
|
||||
tsearch_removed = re.sub('\s+', ' ', tsearch_rem2)
|
||||
tsearch_seriesname = re.sub('[\'\!\@\#\$\%\:\-\;\/\\=\?\&\.\s\,]', '%', tsearch_removed)
|
||||
if mylar.PREFERRED_QUALITY == 0:
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 0:
|
||||
tsearch = tsearch_seriesname + "%"
|
||||
elif mylar.PREFERRED_QUALITY == 1:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 1:
|
||||
tsearch = tsearch_seriesname + "%cbr%"
|
||||
elif mylar.PREFERRED_QUALITY == 2:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2:
|
||||
tsearch = tsearch_seriesname + "%cbz%"
|
||||
else:
|
||||
tsearch = tsearch_seriesname + "%"
|
||||
|
@ -553,9 +555,9 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False)
|
|||
tresults = []
|
||||
tsearch = '%' + tsearch
|
||||
|
||||
if mylar.ENABLE_32P and nzbprov == '32P':
|
||||
if mylar.CONFIG.ENABLE_32P and nzbprov == '32P':
|
||||
tresults = myDB.select("SELECT * FROM rssdb WHERE Title like ? AND Site='32P'", [tsearch])
|
||||
if mylar.ENABLE_TPSE and nzbprov == 'TPSE':
|
||||
if mylar.CONFIG.ENABLE_TPSE and nzbprov == 'TPSE':
|
||||
tresults += myDB.select("SELECT * FROM rssdb WHERE Title like ? AND (Site='DEM' OR Site='WWT')", [tsearch])
|
||||
|
||||
logger.fdebug('seriesname_alt:' + str(seriesname_alt))
|
||||
|
@ -582,19 +584,19 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False)
|
|||
if AS_formatrem_seriesname[:1] == ' ': AS_formatrem_seriesname = AS_formatrem_seriesname[1:]
|
||||
AS_Alt.append(AS_formatrem_seriesname)
|
||||
|
||||
if mylar.PREFERRED_QUALITY == 0:
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 0:
|
||||
AS_Alternate += "%"
|
||||
elif mylar.PREFERRED_QUALITY == 1:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 1:
|
||||
AS_Alternate += "%cbr%"
|
||||
elif mylar.PREFERRED_QUALITY == 2:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2:
|
||||
AS_Alternate += "%cbz%"
|
||||
else:
|
||||
AS_Alternate += "%"
|
||||
|
||||
AS_Alternate = '%' + AS_Alternate
|
||||
if mylar.ENABLE_32P and nzbprov == '32P':
|
||||
if mylar.CONFIG.ENABLE_32P and nzbprov == '32P':
|
||||
tresults += myDB.select("SELECT * FROM rssdb WHERE Title like ? AND Site='32P'", [AS_Alternate])
|
||||
if mylar.ENABLE_TPSE and nzbprov == 'TPSE':
|
||||
if mylar.CONFIG.ENABLE_TPSE and nzbprov == 'TPSE':
|
||||
tresults += myDB.select("SELECT * FROM rssdb WHERE Title like ? AND (Site='DEM' OR Site='WWT')", [AS_Alternate])
|
||||
|
||||
if tresults is None:
|
||||
|
@ -611,13 +613,13 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False)
|
|||
torTITLE = re.sub('&', '&', tor['Title']).strip()
|
||||
|
||||
#torsplit = torTITLE.split(' ')
|
||||
if mylar.PREFERRED_QUALITY == 1:
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 1:
|
||||
if 'cbr' in torTITLE:
|
||||
logger.fdebug('Quality restriction enforced [ cbr only ]. Accepting result.')
|
||||
else:
|
||||
logger.fdebug('Quality restriction enforced [ cbr only ]. Rejecting result.')
|
||||
continue
|
||||
elif mylar.PREFERRED_QUALITY == 2:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2:
|
||||
if 'cbz' in torTITLE:
|
||||
logger.fdebug('Quality restriction enforced [ cbz only ]. Accepting result.')
|
||||
else:
|
||||
|
@ -627,7 +629,7 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False)
|
|||
#logger.fdebug('there are ' + str(len(torsplit)) + ' sections in this title')
|
||||
i=0
|
||||
if nzbprov is not None:
|
||||
if nzbprov != tor['Site'] and not any([mylar.ENABLE_TPSE, tor['Site'] != 'WWT', tor['Site'] != 'DEM']):
|
||||
if nzbprov != tor['Site'] and not any([mylar.CONFIG.ENABLE_TPSE, tor['Site'] != 'WWT', tor['Site'] != 'DEM']):
|
||||
logger.fdebug('this is a result from ' + str(tor['Site']) + ', not the site I am looking for of ' + str(nzbprov))
|
||||
continue
|
||||
#0 holds the title/issue and format-type.
|
||||
|
@ -768,8 +770,8 @@ def nzbdbsearch(seriesname, issue, comicid=None, nzbprov=None, searchYear=None,
|
|||
ComVersChk = 0
|
||||
|
||||
filetype = None
|
||||
if mylar.PREFERRED_QUALITY == 1: filetype = 'cbr'
|
||||
elif mylar.PREFERRED_QUALITY == 2: filetype = 'cbz'
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 1: filetype = 'cbr'
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2: filetype = 'cbz'
|
||||
|
||||
for results in nresults:
|
||||
title = results['Title']
|
||||
|
@ -840,15 +842,15 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
if linkit[-7:] != "torrent":
|
||||
filename += ".torrent"
|
||||
if any([mylar.USE_UTORRENT, mylar.USE_RTORRENT, mylar.USE_TRANSMISSION, mylar.USE_DELUGE, mylar.USE_QBITTORRENT]):
|
||||
filepath = os.path.join(mylar.CACHE_DIR, filename)
|
||||
filepath = os.path.join(mylar.CONFIG.CACHE_DIR, filename)
|
||||
logger.fdebug('filename for torrent set to : ' + filepath)
|
||||
|
||||
elif mylar.USE_WATCHDIR:
|
||||
if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
|
||||
filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
|
||||
if mylar.CONFIG.TORRENT_LOCAL and mylar.CONFIG.LOCAL_WATCHDIR is not None:
|
||||
filepath = os.path.join(mylar.CONFIG.LOCAL_WATCHDIR, filename)
|
||||
logger.fdebug('filename for torrent set to : ' + filepath)
|
||||
elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
|
||||
filepath = os.path.join(mylar.CACHE_DIR, filename)
|
||||
elif mylar.CONFIG.TORRENT_SEEDBOX and mylar.CONFIG.SEEDBOX_WATCHDIR is not None:
|
||||
filepath = os.path.join(mylar.CONFIG.CACHE_DIR, filename)
|
||||
logger.fdebug('filename for torrent set to : ' + filepath)
|
||||
else:
|
||||
logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
|
||||
|
@ -858,14 +860,14 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
if site == '32P':
|
||||
url = 'https://32pag.es/torrents.php'
|
||||
|
||||
if mylar.VERIFY_32P == 1 or mylar.VERIFY_32P == True:
|
||||
if mylar.CONFIG.VERIFY_32P == 1 or mylar.CONFIG.VERIFY_32P == True:
|
||||
verify = True
|
||||
else:
|
||||
verify = False
|
||||
|
||||
logger.fdebug('[32P] Verify SSL set to : ' + str(verify))
|
||||
if mylar.MODE_32P == 0:
|
||||
if mylar.KEYS_32P is None or mylar.PASSKEY_32P is None:
|
||||
if mylar.CONFIG.MODE_32P == 0:
|
||||
if mylar.KEYS_32P is None or mylar.CONFIG.PASSKEY_32P is None:
|
||||
logger.warn('[32P] Unable to retrieve keys from provided RSS Feed. Make sure you have provided a CURRENT RSS Feed from 32P')
|
||||
mylar.KEYS_32P = helpers.parse_32pfeed(mylar.FEED_32P)
|
||||
if mylar.KEYS_32P is None or mylar.KEYS_32P == '':
|
||||
|
@ -877,31 +879,30 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
logger.fdebug('[32P-AUTHENTICATION] 32P (Legacy) Authentication already done. Attempting to use existing keys.')
|
||||
mylar.AUTHKEY_32P = mylar.KEYS_32P['authkey']
|
||||
else:
|
||||
if any([mylar.USERNAME_32P is None, mylar.USERNAME_32P == '', mylar.PASSWORD_32P is None, mylar.PASSWORD_32P == '']):
|
||||
if any([mylar.CONFIG.USERNAME_32P is None, mylar.CONFIG.USERNAME_32P == '', mylar.CONFIG.PASSWORD_32P is None, mylar.CONFIG.PASSWORD_32P == '']):
|
||||
logger.error('[RSS] Unable to sign-on to 32P to validate settings and initiate download sequence. Please enter/check your username password in the configuration.')
|
||||
return "fail"
|
||||
elif mylar.PASSKEY_32P is None or mylar.AUTHKEY_32P is None or mylar.KEYS_32P is None:
|
||||
elif mylar.CONFIG.PASSKEY_32P is None or mylar.AUTHKEY_32P is None or mylar.KEYS_32P is None:
|
||||
logger.fdebug('[32P-AUTHENTICATION] 32P (Auth Mode) Authentication enabled. Keys have not been established yet, attempting to gather.')
|
||||
feed32p = auth32p.info32p(reauthenticate=True)
|
||||
feedinfo = feed32p.authenticate()
|
||||
if feedinfo == "disable":
|
||||
mylar.ENABLE_32P = 0
|
||||
mylar.config_write()
|
||||
mylar.CONFIG.ENABLE_32P = 0
|
||||
#mylar.config_write()
|
||||
return "fail"
|
||||
if mylar.PASSKEY_32P is None or mylar.AUTHKEY_32P is None or mylar.KEYS_32P is None:
|
||||
if mylar.CONFIG.PASSKEY_32P is None or mylar.AUTHKEY_32P is None or mylar.KEYS_32P is None:
|
||||
logger.error('[RSS] Unable to sign-on to 32P to validate settings and initiate download sequence. Please enter/check your username password in the configuration.')
|
||||
return "fail"
|
||||
else:
|
||||
logger.fdebug('[32P-AUTHENTICATION] 32P (Auth Mode) Authentication already done. Attempting to use existing keys.')
|
||||
|
||||
payload = {'action': 'download',
|
||||
'torrent_pass': mylar.PASSKEY_32P,
|
||||
'torrent_pass': mylar.CONFIG.PASSKEY_32P,
|
||||
'authkey': mylar.AUTHKEY_32P,
|
||||
'id': linkit}
|
||||
|
||||
headers = None #{'Accept-encoding': 'gzip',
|
||||
# 'User-Agent': str(mylar.USER_AGENT)}
|
||||
|
||||
elif site == 'TPSE':
|
||||
pass
|
||||
#linkit should be the magnet link since it's TPSE
|
||||
|
@ -989,26 +990,27 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
logger.warn('[EPIC FAILURE] Cannot load the requests module')
|
||||
return "fail"
|
||||
try:
|
||||
logger.info('url: %s' % url)
|
||||
logger.info('payload: %s' % payload)
|
||||
scraper = cfscrape.create_scraper()
|
||||
if cf_cookievalue:
|
||||
r = scraper.get(url, params=payload, cookies=cf_cookievalue, verify=verify, stream=True, headers=headers)
|
||||
else:
|
||||
r = scraper.get(url, params=payload, verify=verify, stream=True, headers=headers)
|
||||
#r = requests.get(url, params=payload, verify=verify, stream=True, headers=headers)
|
||||
|
||||
except Exception, e:
|
||||
logger.warn('Error fetching data from %s (%s): %s' % (site, url, e))
|
||||
if site == '32P':
|
||||
logger.info('[TOR2CLIENT-32P] Retrying with 32P')
|
||||
if mylar.MODE_32P == 1:
|
||||
if mylar.CONFIG.MODE_32P == 1:
|
||||
|
||||
logger.info('[TOR2CLIENT-32P] Attempting to re-authenticate against 32P and poll new keys as required.')
|
||||
feed32p = auth32p.info32p(reauthenticate=True)
|
||||
feedinfo = feed32p.authenticate()
|
||||
|
||||
if feedinfo == "disable":
|
||||
mylar.ENABLE_32P = 0
|
||||
mylar.config_write()
|
||||
mylar.CONFIG.ENABLE_32P = 0
|
||||
#mylar.config_write()
|
||||
return "fail"
|
||||
|
||||
logger.debug('[TOR2CLIENT-32P] Creating CF Scraper')
|
||||
|
@ -1025,7 +1027,6 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
logger.warn('[TOR2CLIENT-32P] Unable to authenticate using existing RSS Feed given. Make sure that you have provided a CURRENT feed from 32P')
|
||||
return "fail"
|
||||
else:
|
||||
logger.info('blah: ' + str(r.status_code))
|
||||
return "fail"
|
||||
|
||||
if any([site == 'TPSE', site == 'DEM', site == 'WWT']) and any([str(r.status_code) == '403', str(r.status_code) == '404', str(r.status_code) == '503']):
|
||||
|
@ -1053,6 +1054,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
|
||||
if str(r.status_code) != '200':
|
||||
logger.warn('Unable to download torrent from ' + site + ' [Status Code returned: ' + str(r.status_code) + ']')
|
||||
logger.info('content: %s' % r.content)
|
||||
return "fail"
|
||||
|
||||
if any([site == 'TPSE', site == 'DEM', site == 'WWT']):
|
||||
|
@ -1102,7 +1104,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
elif mylar.USE_TRANSMISSION:
|
||||
try:
|
||||
rpc = transmission.TorrentClient()
|
||||
if not rpc.connect(mylar.TRANSMISSION_HOST, mylar.TRANSMISSION_USERNAME, mylar.TRANSMISSION_PASSWORD):
|
||||
if not rpc.connect(mylar.CONFIG.TRANSMISSION_HOST, mylar.CONFIG.TRANSMISSION_USERNAME, mylar.CONFIG.TRANSMISSION_PASSWORD):
|
||||
return "fail"
|
||||
torrent_info = rpc.load_torrent(filepath)
|
||||
if torrent_info:
|
||||
|
@ -1118,7 +1120,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
elif mylar.USE_DELUGE:
|
||||
try:
|
||||
dc = deluge.TorrentClient()
|
||||
if not dc.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD):
|
||||
if not dc.connect(mylar.CONFIG.DELUGE_HOST, mylar.CONFIG.DELUGE_USERNAME, mylar.CONFIG.DELUGE_PASSWORD):
|
||||
logger.info('Not connected to Deluge!')
|
||||
return "fail"
|
||||
else:
|
||||
|
@ -1139,7 +1141,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
elif mylar.USE_QBITTORRENT:
|
||||
try:
|
||||
qc = qbittorrent.TorrentClient()
|
||||
if not qc.connect(mylar.QBITTORRENT_HOST, mylar.QBITTORRENT_USERNAME, mylar.QBITTORRENT_PASSWORD):
|
||||
if not qc.connect(mylar.CONFIG.QBITTORRENT_HOST, mylar.CONFIG.QBITTORRENT_USERNAME, mylar.CONFIG.QBITTORRENT_PASSWORD):
|
||||
logger.info('Not connected to qBittorrent - Make sure the Web UI is enabled and the port is correct!')
|
||||
return "fail"
|
||||
else:
|
||||
|
@ -1158,7 +1160,7 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
|
|||
return "fail"
|
||||
|
||||
elif mylar.USE_WATCHDIR:
|
||||
if mylar.TORRENT_LOCAL:
|
||||
if mylar.CONFIG.TORRENT_LOCAL:
|
||||
if site == 'TPSE':
|
||||
torrent_info = {'hash': pubhash}
|
||||
else:
|
||||
|
|
|
@ -28,7 +28,6 @@ class tehMain():
|
|||
pass
|
||||
|
||||
def run(self, forcerss=None):
|
||||
logger.info('forcerss is : %s' % forcerss)
|
||||
with rss_lock:
|
||||
|
||||
logger.info('[RSS-FEEDS] RSS Feed Check was last run at : ' + str(mylar.SCHED_RSS_LAST))
|
||||
|
@ -42,7 +41,7 @@ class tehMain():
|
|||
tstamp = float(mylar.SCHED_RSS_LAST)
|
||||
duration_diff = abs(helpers.utctimestamp() - tstamp)/60
|
||||
logger.fdebug('[RSS-FEEDS] Duration diff: %s' % duration_diff)
|
||||
if firstrun == "no" and duration_diff < int(mylar.RSS_CHECKINTERVAL):
|
||||
if firstrun == "no" and duration_diff < int(mylar.CONFIG.RSS_CHECKINTERVAL):
|
||||
logger.fdebug('[RSS-FEEDS] RSS Check has taken place less than the threshold - not initiating at this time.')
|
||||
return
|
||||
|
||||
|
@ -51,31 +50,33 @@ class tehMain():
|
|||
logger.fdebug('[RSS-FEEDS] Updated RSS Run time to : ' + str(mylar.SCHED_RSS_LAST))
|
||||
|
||||
#function for looping through nzbs/torrent feeds
|
||||
if mylar.ENABLE_TORRENT_SEARCH:
|
||||
if mylar.CONFIG.ENABLE_TORRENT_SEARCH:
|
||||
logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
|
||||
if mylar.ENABLE_TPSE:
|
||||
if mylar.CONFIG.ENABLE_TPSE:
|
||||
logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on TorrentProject.')
|
||||
#rsscheck.torrents(pickfeed='3') #TP.SE RSS Check (has to be page-parsed)
|
||||
rsscheck.torrents(pickfeed='TPSE') #TPSE = DEM RSS Check + WWT RSS Check
|
||||
if mylar.ENABLE_32P:
|
||||
if mylar.CONFIG.ENABLE_32P:
|
||||
logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.')
|
||||
if mylar.MODE_32P == 0:
|
||||
if mylar.CONFIG.MODE_32P == 0:
|
||||
logger.fdebug('[RSS-FEEDS] 32P mode set to Legacy mode. Monitoring New Releases feed only.')
|
||||
if any([mylar.PASSKEY_32P is None, mylar.PASSKEY_32P == '', mylar.RSSFEED_32P is None, mylar.RSSFEED_32P == '']):
|
||||
if any([mylar.CONFIG.PASSKEY_32P is None, mylar.CONFIG.PASSKEY_32P == '', mylar.CONFIG.RSSFEED_32P is None, mylar.CONFIG.RSSFEED_32P == '']):
|
||||
logger.error('[RSS-FEEDS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.')
|
||||
else:
|
||||
rsscheck.torrents(pickfeed='1', feedinfo=mylar.KEYS_32P)
|
||||
else:
|
||||
logger.fdebug('[RSS-FEEDS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed')
|
||||
if any([mylar.USERNAME_32P is None, mylar.USERNAME_32P == '', mylar.PASSWORD_32P is None]):
|
||||
if any([mylar.CONFIG.USERNAME_32P is None, mylar.CONFIG.USERNAME_32P == '', mylar.CONFIG.PASSWORD_32P is None]):
|
||||
logger.error('[RSS-FEEDS] Unable to sign-on to 32P to validate settings. Please enter/check your username password in the configuration.')
|
||||
else:
|
||||
if mylar.KEYS_32P is None:
|
||||
feed32p = auth32p.info32p()
|
||||
feedinfo = feed32p.authenticate()
|
||||
if feedinfo == "disable":
|
||||
mylar.ENABLE_32P = 0
|
||||
mylar.config_write()
|
||||
if feedinfo != "disable":
|
||||
pass
|
||||
else:
|
||||
mylar.CONFIG.ENABLE_32P = 0
|
||||
#mylar.config_write()
|
||||
else:
|
||||
feedinfo = mylar.FEEDINFO_32P
|
||||
|
||||
|
|
|
@ -10,10 +10,10 @@ from decimal import Decimal
|
|||
from HTMLParser import HTMLParseError
|
||||
from time import strptime
|
||||
|
||||
def sabnzbd(sabhost=mylar.SAB_HOST, sabusername=mylar.SAB_USERNAME, sabpassword=mylar.SAB_PASSWORD):
|
||||
#SAB_USERNAME = mylar.SAB_USERNAME
|
||||
#SAB_PASSWORD = mylar.SAB_PASSWORD
|
||||
#SAB_HOST = mylar.SAB_HOST #'http://localhost:8085/'
|
||||
def sabnzbd(sabhost=mylar.CONFIG.SAB_HOST, sabusername=mylar.CONFIG.SAB_USERNAME, sabpassword=mylar.CONFIG.SAB_PASSWORD):
|
||||
#SAB_USERNAME = mylar.CONFIG.SAB_USERNAME
|
||||
#SAB_PASSWORD = mylar.CONFIG.SAB_PASSWORD
|
||||
#SAB_HOST = mylar.CONFIG.SAB_HOST #'http://localhost:8085/'
|
||||
if sabusername is None or sabpassword is None:
|
||||
logger.fdebug('No Username / Password specified for SABnzbd. Unable to auto-retrieve SAB API')
|
||||
if 'https' not in sabhost:
|
||||
|
|
301
mylar/search.py
301
mylar/search.py
|
@ -38,7 +38,7 @@ from base64 import b16encode, b32decode
|
|||
from operator import itemgetter
|
||||
from wsgiref.handlers import format_date_time
|
||||
|
||||
def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, IssueID, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=None, IssueArcID=None, mode=None, rsscheck=None, ComicID=None, manualsearch=None, filesafe=None, allow_packs=None):
|
||||
def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, IssueID, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=None, IssueArcID=None, mode=None, rsscheck=None, ComicID=None, manualsearch=None, filesafe=None, allow_packs=None, oneoff=False):
|
||||
unaltered_ComicName = None
|
||||
if filesafe:
|
||||
if filesafe != ComicName and mode != 'want_ann':
|
||||
|
@ -76,7 +76,6 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
ComicName = ComicName + " annual"
|
||||
if AlternateSearch is not None and AlternateSearch != "None":
|
||||
AlternateSearch = AlternateSearch + " annual"
|
||||
oneoff = False
|
||||
|
||||
if mode == 'pullwant' or IssueID is None:
|
||||
#one-off the download.
|
||||
|
@ -94,31 +93,31 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
torprovider = []
|
||||
torp = 0
|
||||
logger.fdebug("Checking for torrent enabled.")
|
||||
if mylar.ENABLE_TORRENT_SEARCH: #and mylar.ENABLE_TORRENTS:
|
||||
if mylar.ENABLE_32P:
|
||||
if mylar.CONFIG.ENABLE_TORRENT_SEARCH: #and mylar.CONFIG.ENABLE_TORRENTS:
|
||||
if mylar.CONFIG.ENABLE_32P:
|
||||
torprovider.append('32p')
|
||||
torp+=1
|
||||
#print torprovider[0]
|
||||
if mylar.ENABLE_TPSE:
|
||||
if mylar.CONFIG.ENABLE_TPSE:
|
||||
torprovider.append('tpse')
|
||||
torp+=1
|
||||
if mylar.ENABLE_TORZNAB:
|
||||
if mylar.CONFIG.ENABLE_TORZNAB:
|
||||
torprovider.append('torznab')
|
||||
torp+=1
|
||||
##nzb provider selection##
|
||||
##'dognzb' or 'nzb.su' or 'experimental'
|
||||
nzbprovider = []
|
||||
nzbp = 0
|
||||
if mylar.NZBSU == 1:
|
||||
if mylar.CONFIG.NZBSU == True:
|
||||
nzbprovider.append('nzb.su')
|
||||
nzbp+=1
|
||||
if mylar.DOGNZB == 1:
|
||||
if mylar.CONFIG.DOGNZB == True:
|
||||
nzbprovider.append('dognzb')
|
||||
nzbp+=1
|
||||
|
||||
# --------
|
||||
# Xperimental
|
||||
if mylar.EXPERIMENTAL == 1:
|
||||
if mylar.CONFIG.EXPERIMENTAL == True:
|
||||
nzbprovider.append('experimental')
|
||||
nzbp+=1
|
||||
|
||||
|
@ -126,9 +125,9 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
|
||||
newznab_hosts = []
|
||||
|
||||
if mylar.NEWZNAB == 1:
|
||||
#if len(mylar.EXTRA_NEWZNABS > 0):
|
||||
for newznab_host in mylar.EXTRA_NEWZNABS:
|
||||
if mylar.CONFIG.NEWZNAB is True:
|
||||
for newznab_host in mylar.CONFIG.EXTRA_NEWZNABS:
|
||||
logger.info(newznab_host)
|
||||
if newznab_host[5] == '1' or newznab_host[5] == 1:
|
||||
newznab_hosts.append(newznab_host)
|
||||
#if newznab_host[0] == newznab_host[1]:
|
||||
|
@ -138,7 +137,6 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
newznabs+=1
|
||||
logger.fdebug("newznab name:" + str(newznab_host[0]) + " @ " + str(newznab_host[1]))
|
||||
|
||||
#logger.fdebug('newznab hosts: ' + str(newznab_hosts))
|
||||
logger.fdebug('nzbprovider(s): ' + str(nzbprovider))
|
||||
# --------
|
||||
logger.fdebug("there are : " + str(torp) + " torrent providers you have selected.")
|
||||
|
@ -147,7 +145,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
torpr = -1
|
||||
providercount = int(nzbp + newznabs)
|
||||
logger.fdebug("there are : " + str(providercount) + " nzb providers you have selected.")
|
||||
logger.fdebug("Usenet Retention : " + str(mylar.USENET_RETENTION) + " days")
|
||||
logger.fdebug("Usenet Retention : " + str(mylar.CONFIG.USENET_RETENTION) + " days")
|
||||
#nzbpr = providercount - 1
|
||||
#if nzbpr < 0:
|
||||
# nzbpr == 0
|
||||
|
@ -177,12 +175,12 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
i = 1
|
||||
|
||||
if rsscheck:
|
||||
if mylar.ENABLE_RSS:
|
||||
if mylar.CONFIG.ENABLE_RSS:
|
||||
searchcnt = 1 # rss-only
|
||||
else:
|
||||
searchcnt = 0 # if it's not enabled, don't even bother.
|
||||
else:
|
||||
if mylar.ENABLE_RSS:
|
||||
if mylar.CONFIG.ENABLE_RSS:
|
||||
searchcnt = 2 # rss first, then api on non-matches
|
||||
else:
|
||||
searchcnt = 2 #set the searchcnt to 2 (api)
|
||||
|
@ -234,7 +232,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
newznab_host = None
|
||||
searchprov = prov_order[prov_count].lower()
|
||||
|
||||
if searchprov == 'dognzb' and mylar.DOGNZB == 0:
|
||||
if searchprov == 'dognzb' and mylar.CONFIG.DOGNZB == 0:
|
||||
#since dognzb could hit the 50 daily api limit during the middle of a search run, check here on each pass to make
|
||||
#sure it's not disabled (it gets auto-disabled on maxing out the API hits)
|
||||
prov_count+=1
|
||||
|
@ -288,7 +286,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
if findit['status'] is True:
|
||||
#check for snatched_havetotal being enabled here and adjust counts now.
|
||||
#IssueID being the catch/check for one-offs as they won't exist on the watchlist and error out otherwise.
|
||||
if mylar.SNATCHED_HAVETOTAL and any([oneoff is False, IssueID is not None]):
|
||||
if mylar.CONFIG.SNATCHED_HAVETOTAL and any([oneoff is False, IssueID is not None]):
|
||||
logger.fdebug('Adding this to the HAVE total for the series.')
|
||||
helpers.incr_snatched(ComicID)
|
||||
if searchprov == 'TPSE' and mylar.TMP_PROV != searchprov:
|
||||
|
@ -302,9 +300,9 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
else:
|
||||
logger.fdebug('Could not find issue doing a manual search via : ' + str(searchmode))
|
||||
if searchprov == '32P':
|
||||
if mylar.MODE_32P == 0:
|
||||
if mylar.CONFIG.MODE_32P == 0:
|
||||
return findit, 'None'
|
||||
elif mylar.MODE_32P == 1 and searchmode == 'api':
|
||||
elif mylar.CONFIG.MODE_32P == 1 and searchmode == 'api':
|
||||
return findit, 'None'
|
||||
i+=1
|
||||
|
||||
|
@ -312,19 +310,19 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
|
||||
def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, nzbprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host=None, ComicVersion=None, SARC=None, IssueArcID=None, RSS=None, ComicID=None, issuetitle=None, unaltered_ComicName=None, allow_packs=None, oneoff=False):
|
||||
|
||||
if any([allow_packs is None, allow_packs == 'None', allow_packs == 0, allow_packs == '0']) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]):
|
||||
if any([allow_packs is None, allow_packs == 'None', allow_packs == 0, allow_packs == '0']) and all([mylar.CONFIG.ENABLE_TORRENT_SEARCH, mylar.CONFIG.ENABLE_32P]):
|
||||
allow_packs = False
|
||||
elif any([allow_packs == 1, allow_packs == '1']) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]):
|
||||
elif any([allow_packs == 1, allow_packs == '1']) and all([mylar.CONFIG.ENABLE_TORRENT_SEARCH, mylar.CONFIG.ENABLE_32P]):
|
||||
allow_packs = True
|
||||
|
||||
newznab_local = False
|
||||
|
||||
if nzbprov == 'nzb.su':
|
||||
apikey = mylar.NZBSU_APIKEY
|
||||
verify = bool(mylar.NZBSU_VERIFY)
|
||||
apikey = mylar.CONFIG.NZBSU_APIKEY
|
||||
verify = bool(mylar.CONFIG.NZBSU_VERIFY)
|
||||
elif nzbprov == 'dognzb':
|
||||
apikey = mylar.DOGNZB_APIKEY
|
||||
verify = bool(mylar.DOGNZB_VERIFY)
|
||||
apikey = mylar.CONFIG.DOGNZB_APIKEY
|
||||
verify = bool(mylar.CONFIG.DOGNZB_VERIFY)
|
||||
elif nzbprov == 'experimental':
|
||||
apikey = 'none'
|
||||
verify = False
|
||||
|
@ -364,9 +362,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
|
||||
|
||||
#this will completely render the api search results empty. Needs to get fixed.
|
||||
if mylar.PREFERRED_QUALITY == 0: filetype = ""
|
||||
elif mylar.PREFERRED_QUALITY == 1: filetype = ".cbr"
|
||||
elif mylar.PREFERRED_QUALITY == 2: filetype = ".cbz"
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 0: filetype = ""
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 1: filetype = ".cbr"
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2: filetype = ".cbz"
|
||||
|
||||
#UseFuzzy == 0: Normal
|
||||
#UseFuzzy == 1: Remove Year
|
||||
|
@ -491,7 +489,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
while (findloop < findcount):
|
||||
#logger.fdebug('findloop: ' + str(findloop) + ' / findcount: ' + str(findcount))
|
||||
comsrc = comsearch
|
||||
if nzbprov == 'dognzb' and not mylar.DOGNZB:
|
||||
if nzbprov == 'dognzb' and not mylar.CONFIG.DOGNZB:
|
||||
foundc['status'] = False
|
||||
done = True
|
||||
break
|
||||
|
@ -547,7 +545,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
if nzbprov == '':
|
||||
bb = "no results"
|
||||
if nzbprov == '32P':
|
||||
if all([mylar.MODE_32P == 1,mylar.ENABLE_32P]):
|
||||
if all([mylar.CONFIG.MODE_32P == 1,mylar.CONFIG.ENABLE_32P]):
|
||||
searchterm = {'series': ComicName, 'id': ComicID, 'issue': findcomiciss, 'volume': ComicVersion, 'publisher': Publisher}
|
||||
#first we find the id on the serieslist of 32P
|
||||
#then we call the ajax against the id and issue# and volume (if exists)
|
||||
|
@ -575,14 +573,14 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
else: host_newznab_fix = host_newznab
|
||||
findurl = str(host_newznab_fix) + "api?t=search&q=" + str(comsearch) + "&o=xml&cat=" + str(category_newznab)
|
||||
elif nzbprov == 'Torznab':
|
||||
if mylar.TORZNAB_HOST.endswith('/'):
|
||||
if mylar.CONFIG.TORZNAB_HOST.endswith('/'):
|
||||
#http://localhost:9117/api/iptorrents
|
||||
torznab_fix = mylar.TORZNAB_HOST[:-1]
|
||||
torznab_fix = mylar.CONFIG.TORZNAB_HOST[:-1]
|
||||
else:
|
||||
torznab_fix = mylar.TORZNAB_HOST
|
||||
torznab_fix = mylar.CONFIG.TORZNAB_HOST
|
||||
findurl = str(torznab_fix) + "?t=search&q=" + str(comsearch)
|
||||
if str(mylar.TORZNAB_CATEGORY): findurl += "&cat=" + str(mylar.TORZNAB_CATEGORY)
|
||||
apikey = mylar.TORZNAB_APIKEY
|
||||
if str(mylar.CONFIG.TORZNAB_CATEGORY): findurl += "&cat=" + str(mylar.CONFIG.TORZNAB_CATEGORY)
|
||||
apikey = mylar.CONFIG.TORZNAB_APIKEY
|
||||
else:
|
||||
logger.warn('You have a blank newznab entry within your configuration. Remove it, save the config and restart mylar to fix things. Skipping this blank provider until fixed.')
|
||||
findurl = None
|
||||
|
@ -595,15 +593,15 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
|
||||
### IF USENET_RETENTION is set, honour it
|
||||
### For newznab sites, that means appending "&maxage=<whatever>" on the URL
|
||||
if mylar.USENET_RETENTION != None and nzbprov != 'torznab':
|
||||
findurl = findurl + "&maxage=" + str(mylar.USENET_RETENTION)
|
||||
if mylar.CONFIG.USENET_RETENTION != None and nzbprov != 'torznab':
|
||||
findurl = findurl + "&maxage=" + str(mylar.CONFIG.USENET_RETENTION)
|
||||
|
||||
#set a delay between searches here. Default is for 60 seconds...
|
||||
#changing this to lower could result in a ban from your nzb source due to hammering.
|
||||
if mylar.SEARCH_DELAY == 'None' or mylar.SEARCH_DELAY is None:
|
||||
if mylar.CONFIG.SEARCH_DELAY == 'None' or mylar.CONFIG.SEARCH_DELAY is None:
|
||||
pause_the_search = 60 # (it's in seconds)
|
||||
elif str(mylar.SEARCH_DELAY).isdigit():
|
||||
pause_the_search = int(mylar.SEARCH_DELAY) * 60
|
||||
elif str(mylar.CONFIG.SEARCH_DELAY).isdigit():
|
||||
pause_the_search = int(mylar.CONFIG.SEARCH_DELAY) * 60
|
||||
else:
|
||||
logger.info("Check Search Delay - invalid numerical given. Force-setting to 1 minute.")
|
||||
pause_the_search = 60
|
||||
|
@ -681,7 +679,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
logger.error('[ERROR CODE: ' + str(bb['feed']['error']['code']) + '] ' + str(bb['feed']['error']['description']))
|
||||
if bb['feed']['error']['code'] == '910':
|
||||
logger.warn('DAILY API limit reached. Disabling provider usage until 12:01am')
|
||||
mylar.DOGNZB = 0
|
||||
mylar.CONFIG.DOGNZB = 0
|
||||
foundc['status'] = False
|
||||
done = True
|
||||
else:
|
||||
|
@ -778,13 +776,13 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
format_type = 'cbz'
|
||||
else:
|
||||
format_type = 'unknown'
|
||||
if mylar.PREFERRED_QUALITY == 1:
|
||||
if mylar.CONFIG.PREFERRED_QUALITY == 1:
|
||||
if format_type == 'cbr':
|
||||
logger.fdebug('Quality restriction enforced [ .cbr only ]. Accepting result.')
|
||||
else:
|
||||
logger.fdebug('Quality restriction enforced [ .cbr only ]. Rejecting this result.')
|
||||
continue
|
||||
elif mylar.PREFERRED_QUALITY == 2:
|
||||
elif mylar.CONFIG.PREFERRED_QUALITY == 2:
|
||||
if format_type == 'cbz':
|
||||
logger.fdebug('Quality restriction enforced [ .cbz only ]. Accepting result.')
|
||||
else:
|
||||
|
@ -799,14 +797,14 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
logger.fdebug("size given as: " + str(comsize_m))
|
||||
#----size constraints.
|
||||
#if it's not within size constaints - dump it now and save some time.
|
||||
if mylar.USE_MINSIZE:
|
||||
conv_minsize = helpers.human2bytes(mylar.MINSIZE + "M")
|
||||
if mylar.CONFIG.USE_MINSIZE:
|
||||
conv_minsize = helpers.human2bytes(mylar.CONFIG.MINSIZE + "M")
|
||||
logger.fdebug("comparing Min threshold " + str(conv_minsize) + " .. to .. nzb " + str(comsize_b))
|
||||
if int(conv_minsize) > int(comsize_b):
|
||||
logger.fdebug("Failure to meet the Minimum size threshold - skipping")
|
||||
continue
|
||||
if mylar.USE_MAXSIZE:
|
||||
conv_maxsize = helpers.human2bytes(mylar.MAXSIZE + "M")
|
||||
if mylar.CONFIG.USE_MAXSIZE:
|
||||
conv_maxsize = helpers.human2bytes(mylar.CONFIG.MAXSIZE + "M")
|
||||
logger.fdebug("comparing Max threshold " + str(conv_maxsize) + " .. to .. nzb " + str(comsize_b))
|
||||
if int(comsize_b) > int(conv_maxsize):
|
||||
logger.fdebug("Failure to meet the Maximium size threshold - skipping")
|
||||
|
@ -1700,20 +1698,20 @@ def searchforissue(issueid=None, new=False, rsscheck=None):
|
|||
if not issueid or rsscheck:
|
||||
|
||||
if rsscheck:
|
||||
logger.info(u"Initiating RSS Search Scan at the scheduled interval of " + str(mylar.RSS_CHECKINTERVAL) + " minutes.")
|
||||
logger.info(u"Initiating RSS Search Scan at the scheduled interval of " + str(mylar.CONFIG.RSS_CHECKINTERVAL) + " minutes.")
|
||||
else:
|
||||
logger.info(u"Initiating Search scan at the scheduled interval of " + str(mylar.SEARCH_INTERVAL) + " minutes.")
|
||||
logger.info(u"Initiating Search scan at the scheduled interval of " + str(mylar.CONFIG.SEARCH_INTERVAL) + " minutes.")
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
stloop = 1
|
||||
results = []
|
||||
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
stloop+=1
|
||||
while (stloop > 0):
|
||||
if stloop == 1:
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING and mylar.FAILED_AUTO:
|
||||
if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING and mylar.CONFIG.FAILED_AUTO:
|
||||
issues_1 = myDB.select('SELECT * from issues WHERE Status="Wanted" OR Status="Failed"')
|
||||
else:
|
||||
issues_1 = myDB.select('SELECT * from issues WHERE Status="Wanted"')
|
||||
|
@ -1726,7 +1724,7 @@ def searchforissue(issueid=None, new=False, rsscheck=None):
|
|||
'mode': 'want'
|
||||
})
|
||||
elif stloop == 2:
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING and mylar.FAILED_AUTO:
|
||||
if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING and mylar.CONFIG.FAILED_AUTO:
|
||||
issues_2 = myDB.select('SELECT * from annuals WHERE Status="Wanted" OR Status="Failed"')
|
||||
else:
|
||||
issues_2 = myDB.select('SELECT * from annuals WHERE Status="Wanted"')
|
||||
|
@ -1781,7 +1779,10 @@ def searchforissue(issueid=None, new=False, rsscheck=None):
|
|||
else:
|
||||
AllowPacks = False
|
||||
mode = result['mode']
|
||||
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
logger.info('preparing to fire..')
|
||||
|
||||
if (mylar.CONFIG.NZBSU or mylar.CONFIG.DOGNZB or mylar.CONFIG.EXPERIMENTAL or mylar.CONFIG.NEWZNAB or mylar.CONFIG.ENABLE_TPSE or mylar.CONFIG.ENABLE_32P or mylar.CONFIG.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.CONFIG.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
logger.info('fired off')
|
||||
foundNZB, prov = search_init(comic['ComicName'], result['Issue_Number'], str(ComicYear), comic['ComicYear'], Publisher, IssueDate, StoreDate, result['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, rsscheck=rsscheck, ComicID=result['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks)
|
||||
if foundNZB['status'] is True:
|
||||
logger.info(foundNZB)
|
||||
|
@ -1822,7 +1823,7 @@ def searchforissue(issueid=None, new=False, rsscheck=None):
|
|||
AllowPacks = False
|
||||
|
||||
foundNZB = "none"
|
||||
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_TPSE or mylar.ENABLE_32P or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
if (mylar.CONFIG.NZBSU or mylar.CONFIG.DOGNZB or mylar.CONFIG.EXPERIMENTAL or mylar.CONFIG.NEWZNAB or mylar.CONFIG.ENABLE_TPSE or mylar.CONFIG.ENABLE_32P or mylar.CONFIG.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.CONFIG.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
foundNZB, prov = search_init(comic['ComicName'], result['Issue_Number'], str(IssueYear), comic['ComicYear'], Publisher, IssueDate, StoreDate, result['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, rsscheck=rsscheck, ComicID=result['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks)
|
||||
if foundNZB['status'] is True:
|
||||
logger.fdebug("I found " + comic['ComicName'] + ' #:' + str(result['Issue_Number']))
|
||||
|
@ -1861,7 +1862,7 @@ def searchIssueIDList(issuelist):
|
|||
else:
|
||||
AllowPacks = False
|
||||
|
||||
if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.ENABLE_32P or mylar.ENABLE_TPSE or mylar.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
if (mylar.CONFIG.NZBSU or mylar.CONFIG.DOGNZB or mylar.CONFIG.EXPERIMENTAL or mylar.CONFIG.NEWZNAB or mylar.CONFIG.ENABLE_32P or mylar.CONFIG.ENABLE_TPSE or mylar.CONFIG.ENABLE_TORZNAB) and (mylar.USE_SABNZBD or mylar.USE_NZBGET or mylar.CONFIG.ENABLE_TORRENTS or mylar.USE_BLACKHOLE):
|
||||
foundNZB, prov = search_init(comic['ComicName'], issue['Issue_Number'], str(IssueYear), comic['ComicYear'], Publisher, issue['IssueDate'], issue['ReleaseDate'], issue['IssueID'], AlternateSearch, UseFuzzy, ComicVersion, SARC=None, IssueArcID=None, mode=mode, ComicID=issue['ComicID'], filesafe=comic['ComicName_Filesafe'], allow_packs=AllowPacks)
|
||||
if foundNZB['status'] is True:
|
||||
updater.foundsearch(ComicID=issue['ComicID'], IssueID=issue['IssueID'], mode=mode, provider=prov, hash=foundNZB['info']['t_hash'])
|
||||
|
@ -1874,12 +1875,16 @@ def provider_sequence(nzbprovider, torprovider, newznab_hosts):
|
|||
prov_order = []
|
||||
|
||||
nzbproviders_lower = [x.lower() for x in nzbprovider]
|
||||
print nzbprovider
|
||||
print mylar.CONFIG.PROVIDER_ORDER
|
||||
|
||||
if len(mylar.PROVIDER_ORDER) > 0:
|
||||
for pr_order in mylar.PROVIDER_ORDER:
|
||||
#logger.fdebug('looking for ' + str(pr_order[1]).lower())
|
||||
#logger.fdebug('nzbproviders ' + str(nzbproviders_lower))
|
||||
#logger.fdebug('torproviders ' + str(torprovider))
|
||||
print len(mylar.CONFIG.PROVIDER_ORDER)
|
||||
if len(mylar.CONFIG.PROVIDER_ORDER) > 0:
|
||||
for pr_order in sorted(mylar.CONFIG.PROVIDER_ORDER.items(), key=itemgetter(0), reverse=False):
|
||||
print pr_order
|
||||
logger.fdebug('looking for ' + str(pr_order[1]).lower())
|
||||
logger.fdebug('nzbproviders ' + str(nzbproviders_lower))
|
||||
logger.fdebug('torproviders ' + str(torprovider))
|
||||
if (pr_order[1].lower() in torprovider) or any(pr_order[1].lower() in x for x in nzbproviders_lower):
|
||||
logger.fdebug('found provider in existing enabled providers.')
|
||||
if any(pr_order[1].lower() in x for x in nzbproviders_lower):
|
||||
|
@ -1925,7 +1930,7 @@ def nzbname_create(provider, title=None, info=None):
|
|||
nzbname = None
|
||||
|
||||
if mylar.USE_BLACKHOLE and all([provider != '32P', provider != 'TPSE', provider != 'WWT', provider != 'DEM']):
|
||||
if os.path.exists(mylar.BLACKHOLE_DIR):
|
||||
if os.path.exists(mylar.CONFIG.BLACKHOLE_DIR):
|
||||
#load in the required info to generate the nzb names when required (blackhole only)
|
||||
ComicName = info[0]['ComicName']
|
||||
IssueNumber = info[0]['IssueNumber']
|
||||
|
@ -1996,23 +2001,23 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
oneoff = comicinfo[0]['oneoff']
|
||||
|
||||
#setup the priorities.
|
||||
if mylar.SAB_PRIORITY:
|
||||
if mylar.SAB_PRIORITY == "Default": sabpriority = "-100"
|
||||
elif mylar.SAB_PRIORITY == "Low": sabpriority = "-1"
|
||||
elif mylar.SAB_PRIORITY == "Normal": sabpriority = "0"
|
||||
elif mylar.SAB_PRIORITY == "High": sabpriority = "1"
|
||||
elif mylar.SAB_PRIORITY == "Paused": sabpriority = "-2"
|
||||
if mylar.CONFIG.SAB_PRIORITY:
|
||||
if mylar.CONFIG.SAB_PRIORITY == "Default": sabpriority = "-100"
|
||||
elif mylar.CONFIG.SAB_PRIORITY == "Low": sabpriority = "-1"
|
||||
elif mylar.CONFIG.SAB_PRIORITY == "Normal": sabpriority = "0"
|
||||
elif mylar.CONFIG.SAB_PRIORITY == "High": sabpriority = "1"
|
||||
elif mylar.CONFIG.SAB_PRIORITY == "Paused": sabpriority = "-2"
|
||||
else:
|
||||
#if sab priority isn't selected, default to Normal (0)
|
||||
sabpriority = "0"
|
||||
|
||||
if mylar.NZBGET_PRIORITY:
|
||||
if mylar.NZBGET_PRIORITY == "Default": nzbgetpriority = "0"
|
||||
elif mylar.NZBGET_PRIORITY == "Low": nzbgetpriority = "-50"
|
||||
elif mylar.NZBGET_PRIORITY == "Normal": nzbgetpriority = "0"
|
||||
elif mylar.NZBGET_PRIORITY == "High": nzbgetpriority = "50"
|
||||
if mylar.CONFIG.NZBGET_PRIORITY:
|
||||
if mylar.CONFIG.NZBGET_PRIORITY == "Default": nzbgetpriority = "0"
|
||||
elif mylar.CONFIG.NZBGET_PRIORITY == "Low": nzbgetpriority = "-50"
|
||||
elif mylar.CONFIG.NZBGET_PRIORITY == "Normal": nzbgetpriority = "0"
|
||||
elif mylar.CONFIG.NZBGET_PRIORITY == "High": nzbgetpriority = "50"
|
||||
#there's no priority for "paused", so set "Very Low" and deal with that later...
|
||||
elif mylar.NZBGET_PRIORITY == "Paused": nzbgetpriority = "-100"
|
||||
elif mylar.CONFIG.NZBGET_PRIORITY == "Paused": nzbgetpriority = "-100"
|
||||
else:
|
||||
#if sab priority isn't selected, default to Normal (0)
|
||||
nzbgetpriority = "0"
|
||||
|
@ -2036,7 +2041,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
|
||||
logger.fdebug("link given by: " + str(nzbprov))
|
||||
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
if all([nzbid is not None, IssueID is not None, oneoff is False]):
|
||||
# --- this causes any possible snatch to get marked as a Failed download when doing a one-off search...
|
||||
#try:
|
||||
|
@ -2090,8 +2095,8 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
verify = bool(newznab[2])
|
||||
else:
|
||||
down_url = 'https://api.nzb.su/api'
|
||||
apikey = mylar.NZBSU_APIKEY
|
||||
verify = bool(mylar.NZBSU_VERIFY)
|
||||
apikey = mylar.CONFIG.NZBSU_APIKEY
|
||||
verify = bool(mylar.CONFIG.NZBSU_VERIFY)
|
||||
|
||||
if nzbmega == True:
|
||||
down_url = link
|
||||
|
@ -2103,8 +2108,8 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
|
||||
elif nzbprov == 'dognzb':
|
||||
#dognzb - need to add back in the dog apikey
|
||||
down_url = urljoin(link, str(mylar.DOGNZB_APIKEY))
|
||||
verify = bool(mylar.DOGNZB_VERIFY)
|
||||
down_url = urljoin(link, str(mylar.CONFIG.DOGNZB_APIKEY))
|
||||
verify = bool(mylar.CONFIG.DOGNZB_VERIFY)
|
||||
|
||||
else:
|
||||
#experimental - direct link.
|
||||
|
@ -2182,26 +2187,26 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
logger.info('filen: ' + filen + ' -- nzbname: ' + nzbname + ' are not identical. Storing extra value as : ' + alt_nzbname)
|
||||
|
||||
#make sure the cache directory exists - if not, create it (used for storing nzbs).
|
||||
if os.path.exists(mylar.CACHE_DIR):
|
||||
if mylar.ENFORCE_PERMS:
|
||||
logger.fdebug("Cache Directory successfully found at : " + mylar.CACHE_DIR + ". Ensuring proper permissions.")
|
||||
if os.path.exists(mylar.CONFIG.CACHE_DIR):
|
||||
if mylar.CONFIG.ENFORCE_PERMS:
|
||||
logger.fdebug("Cache Directory successfully found at : " + mylar.CONFIG.CACHE_DIR + ". Ensuring proper permissions.")
|
||||
#enforce the permissions here to ensure the lower portion writes successfully
|
||||
filechecker.setperms(mylar.CACHE_DIR, True)
|
||||
filechecker.setperms(mylar.CONFIG.CACHE_DIR, True)
|
||||
else:
|
||||
logger.fdebug("Cache Directory successfully found at : " + mylar.CACHE_DIR)
|
||||
logger.fdebug("Cache Directory successfully found at : " + mylar.CONFIG.CACHE_DIR)
|
||||
else:
|
||||
#let's make the dir.
|
||||
logger.fdebug("Could not locate Cache Directory, attempting to create at : " + mylar.CACHE_DIR)
|
||||
logger.fdebug("Could not locate Cache Directory, attempting to create at : " + mylar.CONFIG.CACHE_DIR)
|
||||
try:
|
||||
filechecker.validateAndCreateDirectory(mylar.CACHE_DIR, True)
|
||||
logger.info("Temporary NZB Download Directory successfully created at: " + mylar.CACHE_DIR)
|
||||
filechecker.validateAndCreateDirectory(mylar.CONFIG.CACHE_DIR, True)
|
||||
logger.info("Temporary NZB Download Directory successfully created at: " + mylar.CONFIG.CACHE_DIR)
|
||||
except OSError:
|
||||
raise
|
||||
|
||||
#save the nzb grabbed, so we can bypass all the 'send-url' crap.
|
||||
if not nzbname.endswith('.nzb'):
|
||||
nzbname = nzbname + '.nzb'
|
||||
nzbpath = os.path.join(mylar.CACHE_DIR, nzbname)
|
||||
nzbpath = os.path.join(mylar.CONFIG.CACHE_DIR, nzbname)
|
||||
|
||||
with open(nzbpath, 'wb') as f:
|
||||
for chunk in r.iter_content(chunk_size=1024):
|
||||
|
@ -2213,19 +2218,19 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
sent_to = None
|
||||
t_hash = None
|
||||
if mylar.USE_BLACKHOLE and all([nzbprov != '32P', nzbprov != 'TPSE', nzbprov != 'WWT', nzbprov != 'DEM', nzbprov != 'Torznab']):
|
||||
logger.fdebug("using blackhole directory at : " + str(mylar.BLACKHOLE_DIR))
|
||||
if os.path.exists(mylar.BLACKHOLE_DIR):
|
||||
logger.fdebug("using blackhole directory at : " + str(mylar.CONFIG.BLACKHOLE_DIR))
|
||||
if os.path.exists(mylar.CONFIG.BLACKHOLE_DIR):
|
||||
#copy the nzb from nzbpath to blackhole dir.
|
||||
try:
|
||||
shutil.move(nzbpath, os.path.join(mylar.BLACKHOLE_DIR, nzbname))
|
||||
shutil.move(nzbpath, os.path.join(mylar.CONFIG.BLACKHOLE_DIR, nzbname))
|
||||
except (OSError, IOError):
|
||||
logger.warn('Failed to move nzb into blackhole directory - check blackhole directory and/or permissions.')
|
||||
return "blackhole-fail"
|
||||
logger.fdebug("filename saved to your blackhole as : " + nzbname)
|
||||
logger.info(u"Successfully sent .nzb to your Blackhole directory : " + os.path.join(mylar.BLACKHOLE_DIR, nzbname))
|
||||
logger.info(u"Successfully sent .nzb to your Blackhole directory : " + os.path.join(mylar.CONFIG.BLACKHOLE_DIR, nzbname))
|
||||
sent_to = "your Blackhole Directory"
|
||||
|
||||
if mylar.ENABLE_SNATCH_SCRIPT:
|
||||
if mylar.CONFIG.ENABLE_SNATCH_SCRIPT:
|
||||
if comicinfo[0]['pack'] is False:
|
||||
pnumbers = None
|
||||
plist = None
|
||||
|
@ -2236,7 +2241,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
'id': nzbid,
|
||||
'nzbname': nzbname,
|
||||
'nzbpath': nzbpath,
|
||||
'blackhole': mylar.BLACKHOLE_DIR},
|
||||
'blackhole': mylar.CONFIG.BLACKHOLE_DIR},
|
||||
'comicinfo': {'comicname': ComicName,
|
||||
'volume': comicinfo[0]['ComicVolume'],
|
||||
'comicid': ComicID,
|
||||
|
@ -2265,7 +2270,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
|
||||
rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov, nzbid) #nzbid = hash for usage with public torrents
|
||||
if rcheck == "fail":
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
logger.error('Unable to send torrent to client. Assuming incomplete link - sending to Failed Handler and continuing search.')
|
||||
if any([oneoff is True, IssueID is None]):
|
||||
logger.fdebug('One-off mode was initiated - Failed Download handling for : ' + ComicName + ' #' + str(IssueNumber))
|
||||
|
@ -2287,12 +2292,12 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
#torrent_info{'folder','name',['total_filesize','label','hash','files','time_started'}
|
||||
t_hash = rcheck['hash']
|
||||
|
||||
if any([mylar.USE_RTORRENT, mylar.USE_DELUGE]) and mylar.AUTO_SNATCH:
|
||||
if any([mylar.USE_RTORRENT, mylar.USE_DELUGE]) and mylar.CONFIG.AUTO_SNATCH:
|
||||
mylar.SNATCHED_QUEUE.put(rcheck['hash'])
|
||||
elif any([mylar.USE_RTORRENT, mylar.USE_DELUGE]) and mylar.LOCAL_TORRENT_PP:
|
||||
elif any([mylar.USE_RTORRENT, mylar.USE_DELUGE]) and mylar.CONFIG.LOCAL_TORRENT_PP:
|
||||
mylar.SNATCHED_QUEUE.put(rcheck['hash'])
|
||||
else:
|
||||
if mylar.ENABLE_SNATCH_SCRIPT:
|
||||
if mylar.CONFIG.ENABLE_SNATCH_SCRIPT:
|
||||
if comicinfo[0]['pack'] is False:
|
||||
pnumbers = None
|
||||
plist = None
|
||||
|
@ -2321,7 +2326,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
logger.info('Could not Successfully submit on-grab script as requested. Please check logs...')
|
||||
|
||||
if mylar.USE_WATCHDIR:
|
||||
if mylar.TORRENT_LOCAL:
|
||||
if mylar.CONFIG.TORRENT_LOCAL:
|
||||
sent_to = "your local Watch folder"
|
||||
else:
|
||||
sent_to = "your seedbox Watch folder"
|
||||
|
@ -2345,12 +2350,12 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
#nzb.get
|
||||
if mylar.USE_NZBGET:
|
||||
from xmlrpclib import ServerProxy
|
||||
if mylar.NZBGET_HOST[:5] == 'https':
|
||||
if mylar.CONFIG.NZBGET_HOST[:5] == 'https':
|
||||
tmpapi = "https://"
|
||||
nzbget_host = mylar.NZBGET_HOST[8:]
|
||||
elif mylar.NZBGET_HOST[:4] == 'http':
|
||||
nzbget_host = mylar.CONFIG.NZBGET_HOST[8:]
|
||||
elif mylar.CONFIG.NZBGET_HOST[:4] == 'http':
|
||||
tmpapi = "http://"
|
||||
nzbget_host = mylar.NZBGET_HOST[7:]
|
||||
nzbget_host = mylar.CONFIG.NZBGET_HOST[7:]
|
||||
else:
|
||||
logger.error("You have an invalid nzbget hostname specified. Exiting")
|
||||
return "nzbget-fail"
|
||||
|
@ -2361,13 +2366,13 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
from base64 import standard_b64encode
|
||||
nzbcontent64 = standard_b64encode(nzbcontent)
|
||||
|
||||
tmpapi = str(tmpapi) + str(mylar.NZBGET_USERNAME) + ":" + str(mylar.NZBGET_PASSWORD)
|
||||
tmpapi = str(tmpapi) + str(mylar.CONFIG.NZBGET_USERNAME) + ":" + str(mylar.CONFIG.NZBGET_PASSWORD)
|
||||
tmpapi = str(tmpapi) + "@" + str(nzbget_host)
|
||||
if str(mylar.NZBGET_PORT).strip() != '':
|
||||
tmpapi += ":" + str(mylar.NZBGET_PORT)
|
||||
if str(mylar.CONFIG.NZBGET_PORT).strip() != '':
|
||||
tmpapi += ":" + str(mylar.CONFIG.NZBGET_PORT)
|
||||
tmpapi += "/xmlrpc"
|
||||
server = ServerProxy(tmpapi)
|
||||
send_to_nzbget = server.append(nzbpath, str(mylar.NZBGET_CATEGORY), int(nzbgetpriority), True, nzbcontent64)
|
||||
send_to_nzbget = server.append(nzbpath, str(mylar.CONFIG.NZBGET_CATEGORY), int(nzbgetpriority), True, nzbcontent64)
|
||||
sent_to = "NZBGet"
|
||||
if send_to_nzbget is True:
|
||||
logger.info("Successfully sent nzb to NZBGet!")
|
||||
|
@ -2379,7 +2384,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
elif mylar.USE_SABNZBD:
|
||||
# let's build the send-to-SAB string now:
|
||||
# changed to just work with direct links now...
|
||||
tmpapi = mylar.SAB_HOST + "/api?apikey=" + mylar.SAB_APIKEY
|
||||
tmpapi = mylar.CONFIG.SAB_HOST + "/api?apikey=" + mylar.CONFIG.SAB_APIKEY
|
||||
|
||||
logger.fdebug("send-to-SAB host &api initiation string : " + str(helpers.apiremove(tmpapi, 'nzb')))
|
||||
|
||||
|
@ -2390,20 +2395,20 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
mylar.DOWNLOAD_APIKEY = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
|
||||
|
||||
#generate the mylar host address if applicable.
|
||||
if mylar.ENABLE_HTTPS:
|
||||
if mylar.CONFIG.ENABLE_HTTPS:
|
||||
proto = 'https://'
|
||||
else:
|
||||
proto = 'http://'
|
||||
|
||||
if mylar.HTTP_ROOT is None:
|
||||
if mylar.CONFIG.HTTP_ROOT is None:
|
||||
hroot = '/'
|
||||
elif mylar.HTTP_ROOT.endswith('/'):
|
||||
hroot = mylar.HTTP_ROOT
|
||||
elif mylar.CONFIG.HTTP_ROOT.endswith('/'):
|
||||
hroot = mylar.CONFIG.HTTP_ROOT
|
||||
else:
|
||||
if mylar.HTTP_ROOT != '/':
|
||||
hroot = mylar.HTTP_ROOT + '/'
|
||||
if mylar.CONFIG.HTTP_ROOT != '/':
|
||||
hroot = mylar.CONFIG.HTTP_ROOT + '/'
|
||||
else:
|
||||
hroot = mylar.HTTP_ROOT
|
||||
hroot = mylar.CONFIG.HTTP_ROOT
|
||||
|
||||
if mylar.LOCAL_IP is None:
|
||||
#if mylar's local, get the local IP using socket.
|
||||
|
@ -2414,43 +2419,43 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
mylar.LOCAL_IP = s.getsockname()[0]
|
||||
s.close()
|
||||
except:
|
||||
logger.warn('Unable to determine local IP. Defaulting to host address for Mylar provided as : ' + str(mylar.HTTP_HOST))
|
||||
logger.warn('Unable to determine local IP. Defaulting to host address for Mylar provided as : ' + str(mylar.CONFIG.HTTP_HOST))
|
||||
|
||||
if mylar.HOST_RETURN:
|
||||
if mylar.CONFIG.HOST_RETURN:
|
||||
#mylar has the return value already provided (easier and will work if it's right)
|
||||
if mylar.HOST_RETURN.endswith('/'):
|
||||
mylar_host = mylar.HOST_RETURN
|
||||
if mylar.CONFIG.HOST_RETURN.endswith('/'):
|
||||
mylar_host = mylar.CONFIG.HOST_RETURN
|
||||
else:
|
||||
mylar_host = mylar.HOST_RETURN + '/'
|
||||
mylar_host = mylar.CONFIG.HOST_RETURN + '/'
|
||||
|
||||
elif mylar.SAB_TO_MYLAR:
|
||||
elif mylar.CONFIG.SAB_TO_MYLAR:
|
||||
#if sab & mylar are on different machines, check to see if they are local or external IP's provided for host.
|
||||
if mylar.HTTP_HOST == 'localhost' or mylar.HTTP_HOST == '0.0.0.0' or mylar.HTTP_HOST.startswith('10.') or mylar.HTTP_HOST.startswith('192.') or mylar.HTTP_HOST.startswith('172.'):
|
||||
if mylar.CONFIG.HTTP_HOST == 'localhost' or mylar.CONFIG.HTTP_HOST == '0.0.0.0' or mylar.CONFIG.HTTP_HOST.startswith('10.') or mylar.CONFIG.HTTP_HOST.startswith('192.') or mylar.CONFIG.HTTP_HOST.startswith('172.'):
|
||||
#if mylar's local, use the local IP already assigned to LOCAL_IP.
|
||||
mylar_host = proto + str(mylar.LOCAL_IP) + ':' + str(mylar.HTTP_PORT) + hroot
|
||||
mylar_host = proto + str(mylar.LOCAL_IP) + ':' + str(mylar.CONFIG.HTTP_PORT) + hroot
|
||||
else:
|
||||
if mylar.EXT_IP is None:
|
||||
#if mylar isn't local, get the external IP using pystun.
|
||||
import stun
|
||||
sip = mylar.HTTP_HOST
|
||||
port = int(mylar.HTTP_PORT)
|
||||
sip = mylar.CONFIG.HTTP_HOST
|
||||
port = int(mylar.CONFIG.HTTP_PORT)
|
||||
try:
|
||||
nat_type, ext_ip, ext_port = stun.get_ip_info(sip,port)
|
||||
mylar_host = proto + str(ext_ip) + ':' + str(mylar.HTTP_PORT) + hroot
|
||||
mylar_host = proto + str(ext_ip) + ':' + str(mylar.CONFIG.HTTP_PORT) + hroot
|
||||
mylar.EXT_IP = ext_ip
|
||||
except:
|
||||
logger.warn('Unable to retrieve External IP - try using the host_return option in the config.ini.')
|
||||
mylar_host = proto + str(mylar.HTTP_HOST) + ':' + str(mylar.HTTP_PORT) + hroot
|
||||
mylar_host = proto + str(mylar.CONFIG.HTTP_HOST) + ':' + str(mylar.CONFIG.HTTP_PORT) + hroot
|
||||
else:
|
||||
mylar_host = proto + str(mylar.EXT_IP) + ':' + str(mylar.HTTP_PORT) + hroot
|
||||
mylar_host = proto + str(mylar.EXT_IP) + ':' + str(mylar.CONFIG.HTTP_PORT) + hroot
|
||||
|
||||
else:
|
||||
#if all else fails, drop it back to the basic host:port and try that.
|
||||
if mylar.LOCAL_IP is None:
|
||||
tmp_host = mylar.HTTP_HOST
|
||||
tmp_host = mylar.CONFIG.HTTP_HOST
|
||||
else:
|
||||
tmp_host = mylar.LOCAL_IP
|
||||
mylar_host = proto + str(tmp_host) + ':' + str(mylar.HTTP_PORT) + hroot
|
||||
mylar_host = proto + str(tmp_host) + ':' + str(mylar.CONFIG.HTTP_PORT) + hroot
|
||||
|
||||
|
||||
fileURL = mylar_host + 'api?apikey=' + mylar.DOWNLOAD_APIKEY + '&cmd=downloadNZB&nzbname=' + nzbname
|
||||
|
@ -2463,17 +2468,17 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
|
||||
logger.fdebug("...attaching nzb via internal Mylar API: " + str(helpers.apiremove(tmpapi, '$')))
|
||||
# determine SAB priority
|
||||
if mylar.SAB_PRIORITY:
|
||||
if mylar.CONFIG.SAB_PRIORITY:
|
||||
tmpapi = tmpapi + "&priority=" + sabpriority
|
||||
logger.fdebug("...setting priority: " + str(helpers.apiremove(tmpapi, '&')))
|
||||
# if category is blank, let's adjust
|
||||
if mylar.SAB_CATEGORY:
|
||||
tmpapi = tmpapi + "&cat=" + mylar.SAB_CATEGORY
|
||||
if mylar.CONFIG.SAB_CATEGORY:
|
||||
tmpapi = tmpapi + "&cat=" + mylar.CONFIG.SAB_CATEGORY
|
||||
logger.fdebug("...attaching category: " + str(helpers.apiremove(tmpapi, '&')))
|
||||
if mylar.POST_PROCESSING: #or mylar.RENAME_FILES:
|
||||
if mylar.POST_PROCESSING_SCRIPT:
|
||||
if mylar.CONFIG.POST_PROCESSING: #or mylar.CONFIG.RENAME_FILES:
|
||||
if mylar.CONFIG.POST_PROCESSING_SCRIPT:
|
||||
#this is relative to the SABnzbd script directory (ie. no path)
|
||||
tmpapi = tmpapi + "&script=" + mylar.POST_PROCESSING_SCRIPT
|
||||
tmpapi = tmpapi + "&script=" + mylar.CONFIG.POST_PROCESSING_SCRIPT
|
||||
else:
|
||||
tmpapi = tmpapi + "&script=ComicRN.py"
|
||||
logger.fdebug("...attaching rename script: " + str(helpers.apiremove(tmpapi, '&')))
|
||||
|
@ -2502,7 +2507,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
|
|||
|
||||
sent_to = "SABnzbd+"
|
||||
logger.info(u"Successfully sent nzb file to SABnzbd")
|
||||
if mylar.ENABLE_SNATCH_SCRIPT:
|
||||
if mylar.CONFIG.ENABLE_SNATCH_SCRIPT:
|
||||
if mylar.USE_NZBGET:
|
||||
clientmode = 'nzbget'
|
||||
elif mylar.USE_SABNZBD:
|
||||
|
@ -2572,31 +2577,31 @@ def notify_snatch(nzbname, sent_to, modcomicname, comyear, IssueNumber, nzbprov)
|
|||
|
||||
snline = modcomicname + ' (' + comyear + ') - Issue #' + IssueNumber + ' snatched!'
|
||||
|
||||
if mylar.PROWL_ENABLED and mylar.PROWL_ONSNATCH:
|
||||
if mylar.CONFIG.PROWL_ENABLED and mylar.CONFIG.PROWL_ONSNATCH:
|
||||
logger.info(u"Sending Prowl notification")
|
||||
prowl = notifiers.PROWL()
|
||||
prowl.notify(nzbname, "Download started using " + sent_to)
|
||||
if mylar.NMA_ENABLED and mylar.NMA_ONSNATCH:
|
||||
if mylar.CONFIG.NMA_ENABLED and mylar.CONFIG.NMA_ONSNATCH:
|
||||
logger.info(u"Sending NMA notification")
|
||||
nma = notifiers.NMA()
|
||||
nma.notify(snline=snline, snatched_nzb=nzbname, sent_to=sent_to, prov=nzbprov)
|
||||
if mylar.PUSHOVER_ENABLED and mylar.PUSHOVER_ONSNATCH:
|
||||
if mylar.CONFIG.PUSHOVER_ENABLED and mylar.CONFIG.PUSHOVER_ONSNATCH:
|
||||
logger.info(u"Sending Pushover notification")
|
||||
pushover = notifiers.PUSHOVER()
|
||||
pushover.notify(snline, snatched_nzb=nzbname, sent_to=sent_to, prov=nzbprov)
|
||||
if mylar.BOXCAR_ENABLED and mylar.BOXCAR_ONSNATCH:
|
||||
if mylar.CONFIG.BOXCAR_ENABLED and mylar.CONFIG.BOXCAR_ONSNATCH:
|
||||
logger.info(u"Sending Boxcar notification")
|
||||
boxcar = notifiers.BOXCAR()
|
||||
boxcar.notify(snatched_nzb=nzbname, sent_to=sent_to, snline=snline)
|
||||
if mylar.PUSHBULLET_ENABLED and mylar.PUSHBULLET_ONSNATCH:
|
||||
if mylar.CONFIG.PUSHBULLET_ENABLED and mylar.CONFIG.PUSHBULLET_ONSNATCH:
|
||||
logger.info(u"Sending Pushbullet notification")
|
||||
pushbullet = notifiers.PUSHBULLET()
|
||||
pushbullet.notify(snline=snline, snatched=nzbname, sent_to=sent_to, prov=nzbprov, method='POST')
|
||||
if mylar.TELEGRAM_ENABLED and mylar.TELEGRAM_ONSNATCH:
|
||||
if mylar.CONFIG.TELEGRAM_ENABLED and mylar.CONFIG.TELEGRAM_ONSNATCH:
|
||||
logger.info(u"Sending Telegram notification")
|
||||
telegram = notifiers.TELEGRAM()
|
||||
telegram.notify(snline, nzbname)
|
||||
if mylar.SLACK_ENABLED and mylar.SLACK_ONSNATCH:
|
||||
if mylar.CONFIG.SLACK_ENABLED and mylar.CONFIG.SLACK_ONSNATCH:
|
||||
logger.info(u"Sending Slack notification")
|
||||
slack = notifiers.SLACK()
|
||||
slack.notify("Snatched", snline)
|
||||
|
@ -2776,7 +2781,7 @@ def generate_id(nzbprov, link):
|
|||
url_parts = urlparse.urlparse(link)
|
||||
path_parts = url_parts[2].rpartition('/')
|
||||
nzbid = path_parts[0].rsplit('/', 1)[1]
|
||||
elif nzbprov == 'newznab':
|
||||
elif 'newznab' in nzbprov:
|
||||
#if in format of http://newznab/getnzb/<id>.nzb&i=1&r=apikey
|
||||
tmpid = urlparse.urlparse(link)[4] #param 4 is the query string from the url.
|
||||
if 'warp' in urlparse.urlparse(link)[2] and 'x=' in tmpid:
|
||||
|
@ -2793,10 +2798,10 @@ def generate_id(nzbprov, link):
|
|||
end = len(tmpid)
|
||||
nzbid = re.sub('&id=', '', tmpid[st:end]).strip()
|
||||
elif nzbprov == 'Torznab':
|
||||
if mylar.TORZNAB_HOST.endswith('/'):
|
||||
tmphost = mylar.TORZNAB_HOST + 'download/'
|
||||
if mylar.CONFIG.TORZNAB_HOST.endswith('/'):
|
||||
tmphost = mylar.CONFIG.TORZNAB_HOST + 'download/'
|
||||
else:
|
||||
tmphost = mylar.TORZNAB_HOST + '/download/'
|
||||
tmphost = mylar.CONFIG.TORZNAB_HOST + '/download/'
|
||||
tmpline = re.sub(tmphost, '', tmphost).strip()
|
||||
tmpidend = tmpline.find('/')
|
||||
nzbid = tmpline[:tmpidend]
|
||||
|
|
|
@ -36,11 +36,11 @@ from mylar import logger, helpers
|
|||
class RTorrent(object):
|
||||
def __init__(self):
|
||||
self.client = TorClient.TorrentClient()
|
||||
if not self.client.connect(mylar.RTORRENT_HOST,
|
||||
mylar.RTORRENT_USERNAME,
|
||||
mylar.RTORRENT_PASSWORD,
|
||||
mylar.RTORRENT_AUTHENTICATION):
|
||||
logger.error('could not connect to %s, exiting', mylar.RTORRENT_HOST)
|
||||
if not self.client.connect(mylar.CONFIG.RTORRENT_HOST,
|
||||
mylar.CONFIG.RTORRENT_USERNAME,
|
||||
mylar.CONFIG.RTORRENT_PASSWORD,
|
||||
mylar.CONFIG.RTORRENT_AUTHENTICATION):
|
||||
logger.error('could not connect to %s, exiting', mylar.CONFIG.RTORRENT_HOST)
|
||||
sys.exit(-1)
|
||||
|
||||
def main(self, torrent_hash=None, filepath=None, check=False):
|
||||
|
|
|
@ -123,19 +123,19 @@ class TorrentClient(object):
|
|||
return False
|
||||
|
||||
# If label enabled put label on torrent in Deluge
|
||||
if torrent_id and mylar.DELUGE_LABEL:
|
||||
logger.info ('Setting label to ' + mylar.DELUGE_LABEL)
|
||||
if torrent_id and mylar.CONFIG.DELUGE_LABEL:
|
||||
logger.info ('Setting label to ' + mylar.CONFIG.DELUGE_LABEL)
|
||||
try:
|
||||
self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
|
||||
self.client.call('label.set_torrent', torrent_id, mylar.CONFIG.DELUGE_LABEL)
|
||||
except:
|
||||
#if label isn't set, let's try and create one.
|
||||
try:
|
||||
self.client.call('label.add', mylar.DELUGE_LABEL)
|
||||
self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
|
||||
self.client.call('label.add', mylar.CONFIG.DELUGE_LABEL)
|
||||
self.client.call('label.set_torrent', torrent_id, mylar.CONFIG.DELUGE_LABEL)
|
||||
except:
|
||||
logger.warn('Unable to set label - Either try to create it manually within Deluge, and/or ensure there are no spaces, capitalization or special characters in label')
|
||||
else:
|
||||
logger.info('Succesfully set label to ' + mylar.DELUGE_LABEL)
|
||||
logger.info('Succesfully set label to ' + mylar.CONFIG.DELUGE_LABEL)
|
||||
|
||||
try:
|
||||
torrent_info = self.get_torrent(torrent_id)
|
||||
|
@ -146,7 +146,7 @@ class TorrentClient(object):
|
|||
else:
|
||||
logger.info('Torrent successfully added!')
|
||||
return {'hash': torrent_info['hash'],
|
||||
'label': mylar.DELUGE_LABEL,
|
||||
'label': mylar.CONFIG.DELUGE_LABEL,
|
||||
'folder': torrent_info['save_path'],
|
||||
'total_filesize': torrent_info['total_size'],
|
||||
'name': torrent_info['name'],
|
||||
|
|
|
@ -83,7 +83,7 @@ class TorrentClient(object):
|
|||
logger.info('Torrent not added yet, trying to add it now!')
|
||||
if filepath.startswith('magnet'):
|
||||
try:
|
||||
tid = self.client.download_from_link(filepath, category=str(mylar.QBITTORRENT_LABEL))
|
||||
tid = self.client.download_from_link(filepath, category=str(mylar.CONFIG.QBITTORRENT_LABEL))
|
||||
except Exception as e:
|
||||
logger.debug('Torrent not added')
|
||||
return {'status': False}
|
||||
|
@ -92,14 +92,14 @@ class TorrentClient(object):
|
|||
else:
|
||||
try:
|
||||
torrent_content = open(filepath, 'rb')
|
||||
tid = self.client.download_from_file(torrent_content, category=str(mylar.QBITTORRENT_LABEL))
|
||||
tid = self.client.download_from_file(torrent_content, category=str(mylar.CONFIG.QBITTORRENT_LABEL))
|
||||
except Exception as e:
|
||||
logger.debug('Torrent not added')
|
||||
return {'status': False}
|
||||
else:
|
||||
logger.debug('Successfully submitted for add via file. Verifying item is now on client.')
|
||||
|
||||
if mylar.QBITTORRENT_STARTONLOAD:
|
||||
if mylar.CONFIG.QBITTORRENT_STARTONLOAD:
|
||||
logger.info('attempting to start')
|
||||
startit = self.client.force_start(hash)
|
||||
logger.info('startit returned:' + str(startit))
|
||||
|
@ -132,7 +132,7 @@ class TorrentClient(object):
|
|||
'total_filesize': tinfo['total_size'],
|
||||
'folder': tinfo['save_path'],
|
||||
'time_started': tinfo['addition_date'],
|
||||
'label': mylar.QBITTORRENT_LABEL,
|
||||
'label': mylar.CONFIG.QBITTORRENT_LABEL,
|
||||
'status': True}
|
||||
|
||||
#logger.info(torrent_info)
|
||||
|
|
|
@ -13,12 +13,12 @@ class TorrentClient(object):
|
|||
|
||||
def getVerifySsl(self):
|
||||
# Ensure verification has been enabled
|
||||
if not mylar.RTORRENT_VERIFY:
|
||||
if not mylar.CONFIG.RTORRENT_VERIFY:
|
||||
return False
|
||||
|
||||
# Use ca bundle if defined
|
||||
if mylar.RTORRENT_CA_BUNDLE and os.path.exists(ca_bundle):
|
||||
return mylar.RTORRENT_CA_BUNDLE
|
||||
if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None and os.path.exists(mylar.CONFIG.RTORRENT_CA_BUNDLE):
|
||||
return mylar.CONFIG.RTORRENT_CA_BUNDLE
|
||||
|
||||
# Use default ssl verification
|
||||
return True
|
||||
|
@ -30,17 +30,17 @@ class TorrentClient(object):
|
|||
if not host:
|
||||
return False
|
||||
|
||||
url = helpers.cleanHost(host, protocol = True, ssl = mylar.RTORRENT_SSL)
|
||||
url = helpers.cleanHost(host, protocol = True, ssl = mylar.CONFIG.RTORRENT_SSL)
|
||||
|
||||
# Automatically add '+https' to 'httprpc' protocol if SSL is enabled
|
||||
if mylar.RTORRENT_SSL and url.startswith('httprpc://'):
|
||||
if mylar.CONFIG.RTORRENT_SSL and url.startswith('httprpc://'):
|
||||
url = url.replace('httprpc://', 'httprpc+https://')
|
||||
|
||||
parsed = urlparse(url)
|
||||
|
||||
# rpc_url is only used on http/https scgi pass-through
|
||||
if parsed.scheme in ['http', 'https']:
|
||||
url += mylar.RTORRENT_RPC_URL
|
||||
url += mylar.CONFIG.RTORRENT_RPC_URL
|
||||
|
||||
#logger.fdebug(url)
|
||||
|
||||
|
@ -103,7 +103,7 @@ class TorrentClient(object):
|
|||
return torrent_info if torrent_info else False
|
||||
|
||||
def load_torrent(self, filepath):
|
||||
start = bool(mylar.RTORRENT_STARTONLOAD)
|
||||
start = bool(mylar.CONFIG.RTORRENT_STARTONLOAD)
|
||||
|
||||
if filepath.startswith('magnet'):
|
||||
logger.fdebug('torrent magnet link set to : ' + filepath)
|
||||
|
@ -139,13 +139,13 @@ class TorrentClient(object):
|
|||
# f.set_priority(0) #set them to not download just to see if this works...
|
||||
#torrent.updated_priorities()
|
||||
|
||||
if mylar.RTORRENT_LABEL:
|
||||
torrent.set_custom(1, mylar.RTORRENT_LABEL)
|
||||
logger.fdebug('Setting label for torrent to : ' + mylar.RTORRENT_LABEL)
|
||||
if mylar.CONFIG.RTORRENT_LABEL is not None:
|
||||
torrent.set_custom(1, mylar.CONFIG.RTORRENT_LABEL)
|
||||
logger.fdebug('Setting label for torrent to : ' + mylar.CONFIG.RTORRENT_LABEL)
|
||||
|
||||
if mylar.RTORRENT_DIRECTORY:
|
||||
torrent.set_directory(mylar.RTORRENT_DIRECTORY)
|
||||
logger.fdebug('Setting directory for torrent to : ' + mylar.RTORRENT_DIRECTORY)
|
||||
if mylar.CONFIG.RTORRENT_DIRECTORY is not None:
|
||||
torrent.set_directory(mylar.CONFIG.RTORRENT_DIRECTORY)
|
||||
logger.fdebug('Setting directory for torrent to : ' + mylar.CONFIG.RTORRENT_DIRECTORY)
|
||||
|
||||
logger.info('Successfully loaded torrent.')
|
||||
|
||||
|
|
|
@ -73,10 +73,10 @@ class TorrentClient(object):
|
|||
return torrent.stop()
|
||||
|
||||
def load_torrent(self, filepath):
|
||||
if any([mylar.TRANSMISSION_DIRECTORY is None, mylar.TRANSMISSION_DIRECTORY == '', mylar.TRANSMISSION_DIRECTORY == 'None']):
|
||||
down_dir = mylar.CHECK_FOLDER
|
||||
if any([mylar.CONFIG.TRANSMISSION_DIRECTORY is None, mylar.CONFIG.TRANSMISSION_DIRECTORY == '', mylar.CONFIG.TRANSMISSION_DIRECTORY == 'None']):
|
||||
down_dir = mylar.CONFIG.CHECK_FOLDER
|
||||
else:
|
||||
down_dir = mylar.TRANSMISSION_DIRECTORY
|
||||
down_dir = mylar.CONFIG.TRANSMISSION_DIRECTORY
|
||||
if filepath.startswith('magnet'):
|
||||
torrent = self.conn.add_torrent('%s' % filepath,
|
||||
download_dir=down_dir)
|
||||
|
|
117
mylar/updater.py
117
mylar/updater.py
|
@ -33,7 +33,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
return
|
||||
myDB = db.DBConnection()
|
||||
if ComicIDList is None:
|
||||
if mylar.UPDATE_ENDED:
|
||||
if mylar.CONFIG.UPDATE_ENDED:
|
||||
logger.info('Updating only Continuing Series (option enabled) - this might cause problems with the pull-list matching for rebooted series')
|
||||
comiclist = []
|
||||
completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, LatestDate ASC')
|
||||
|
@ -80,6 +80,8 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
|
||||
cnt = 1
|
||||
|
||||
logger.fdebug('Refresh sequence set to fire every %s minutes for %s day(s)' % (mylar.DBUPDATE_INTERVAL, mylar.CONFIG.REFRESH_CACHE))
|
||||
|
||||
for comic in sorted(comiclist, key=operator.itemgetter('LastUpdated'), reverse=True):
|
||||
dspyear = comic['ComicYear']
|
||||
csyear = None
|
||||
|
@ -101,8 +103,9 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
n_date = datetime.datetime.now()
|
||||
absdiff = abs(n_date - c_obj_date)
|
||||
hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0
|
||||
if hours < 5:
|
||||
logger.fdebug(ComicName + '[' + str(ComicID) + '] Was refreshed less than 5 hours ago. Skipping Refresh at this time.')
|
||||
cache_hours = mylar.CONFIG.REFRESH_CACHE * 24
|
||||
if hours < cache_hours:
|
||||
#logger.fdebug('%s [%s] Was refreshed less than %s hours ago. Skipping Refresh at this time.' % (ComicName, ComicID, cache_hours))
|
||||
cnt +=1
|
||||
continue
|
||||
logger.info('[' + str(cnt) + '/' + str(len(comiclist)) + '] Refreshing :' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']')
|
||||
|
@ -113,7 +116,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
logger.fdebug('Refreshing: ' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']')
|
||||
|
||||
mismatch = "no"
|
||||
if not mylar.CV_ONLY or ComicID[:1] == "G":
|
||||
if not mylar.CONFIG.CV_ONLY or ComicID[:1] == "G":
|
||||
|
||||
CV_EXcomicid = myDB.selectone("SELECT * from exceptions WHERE ComicID=?", [ComicID]).fetchone()
|
||||
if CV_EXcomicid is None: pass
|
||||
|
@ -125,7 +128,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
else:
|
||||
cchk = importer.addComictoDB(ComicID, mismatch)
|
||||
else:
|
||||
if mylar.CV_ONETIMER == 1:
|
||||
if mylar.CONFIG.CV_ONETIMER == 1:
|
||||
if sched is True:
|
||||
helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running')
|
||||
mylar.UPDATER_STATUS = 'Running'
|
||||
|
@ -153,7 +156,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
|
||||
annload = [] #initiate the list here so we don't error out below.
|
||||
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
#now we load the annuals into memory to pass through to importer when refreshing so that it can
|
||||
#refresh even the manually added annuals.
|
||||
annual_load = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
|
||||
|
@ -194,7 +197,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
annuals = []
|
||||
ann_list = []
|
||||
#reload the annuals here.
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
annuals_list = myDB.select('SELECT * FROM annuals WHERE ComicID=?', [ComicID])
|
||||
ann_list += annuals_list
|
||||
issues_new += annuals_list
|
||||
|
@ -237,11 +240,11 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
newVAL = {"Status": issue['Status']}
|
||||
|
||||
if all([issuenew['Status'] == None, issue['Status'] == 'Skipped']):
|
||||
if issuenew['ReleaseDate'] == '00000000':
|
||||
if issuenew['ReleaseDate'] == '0000-00-00':
|
||||
dk = re.sub('-', '', issue['IssueDate']).strip()
|
||||
else:
|
||||
dk = re.sub('-', '', issuenew['ReleaseDate']).strip() # converts date to 20140718 format
|
||||
if dk == '00000000':
|
||||
if dk == '0000-00-00':
|
||||
logger.warn('Issue Data is invalid for Issue Number %s. Marking this issue as Skipped' % issue['Issue_Number'])
|
||||
newVAL = {"Status": "Skipped"}
|
||||
else:
|
||||
|
@ -249,9 +252,10 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
nowdate = datetime.datetime.now()
|
||||
now_week = datetime.datetime.strftime(nowdate, "%Y%U")
|
||||
issue_week = datetime.datetime.strftime(datechk, "%Y%U")
|
||||
if mylar.AUTOWANT_ALL:
|
||||
if mylar.CONFIG.AUTOWANT_ALL:
|
||||
newVAL = {"Status": "Wanted"}
|
||||
elif issue_week >= now_week:
|
||||
logger.fdebug('Issue_week: %s -- now_week: %s' % (issue_week, now_week))
|
||||
logger.fdebug('Issue date [%s] is in/beyond current week - marking as Wanted.' % dk)
|
||||
newVAL = {"Status": "Wanted"}
|
||||
else:
|
||||
|
@ -284,7 +288,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
|
||||
issuesnew = myDB.select('SELECT * FROM issues WHERE ComicID=? AND Status is NULL', [ComicID])
|
||||
|
||||
if mylar.AUTOWANT_UPCOMING:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
newstatus = "Wanted"
|
||||
else:
|
||||
newstatus = "Skipped"
|
||||
|
@ -296,7 +300,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None, sched=False):
|
|||
"Status": newstatus,
|
||||
"Annual": False})
|
||||
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
annualsnew = myDB.select('SELECT * FROM annuals WHERE ComicID=? AND Status is NULL', [ComicID])
|
||||
|
||||
for ann in annualsnew:
|
||||
|
@ -374,7 +378,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
else:
|
||||
if CV_EXcomicid['variloop'] == '99':
|
||||
mismatch = "yes"
|
||||
if mylar.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
if mylar.CONFIG.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
lastupdatechk = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [ComicID]).fetchone()
|
||||
if lastupdatechk is None:
|
||||
pullupd = "yes"
|
||||
|
@ -389,12 +393,12 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0
|
||||
else:
|
||||
#if it's at this point and the refresh is None, odds are very good that it's already up-to-date so let it flow thru
|
||||
if mylar.PULL_REFRESH is None:
|
||||
mylar.PULL_REFRESH = datetime.datetime.today()
|
||||
if mylar.CONFIG.PULL_REFRESH is None:
|
||||
mylar.CONFIG.PULL_REFRESH = datetime.datetime.today()
|
||||
#update the PULL_REFRESH
|
||||
mylar.config_write()
|
||||
logger.fdebug('pull_refresh: ' + str(mylar.PULL_REFRESH))
|
||||
c_obj_date = datetime.datetime.strptime(str(mylar.PULL_REFRESH),"%Y-%m-%d %H:%M:%S")
|
||||
#mylar.config_write()
|
||||
logger.fdebug('pull_refresh: ' + str(mylar.CONFIG.PULL_REFRESH))
|
||||
c_obj_date = datetime.datetime.strptime(str(mylar.CONFIG.PULL_REFRESH),"%Y-%m-%d %H:%M:%S")
|
||||
#logger.fdebug('c_obj_date: ' + str(c_obj_date))
|
||||
n_date = datetime.datetime.now()
|
||||
#logger.fdebug('n_date: ' + str(n_date))
|
||||
|
@ -404,7 +408,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
#logger.fdebug('hours: ' + str(hours))
|
||||
|
||||
if 'annual' in ComicName.lower():
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
logger.info('checking: ' + str(ComicID) + ' -- issue#: ' + str(IssueNumber))
|
||||
issuechk = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? AND Issue_Number=?", [ComicID, IssueNumber]).fetchone()
|
||||
else:
|
||||
|
@ -418,7 +422,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
if issuechk is None:
|
||||
if futurepull is None:
|
||||
og_status = None
|
||||
if mylar.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
if mylar.CONFIG.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
logger.fdebug(adjComicName + ' Issue: ' + str(IssueNumber) + ' not present in listings to mark for download...updating comic and adding to Upcoming Wanted Releases.')
|
||||
# we need to either decrease the total issue count, OR indicate that an issue is upcoming.
|
||||
upco_results = myDB.select("SELECT COUNT(*) FROM UPCOMING WHERE ComicID=?", [ComicID])
|
||||
|
@ -449,9 +453,9 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
logger.fdebug('hours: ' + str(hours) + ' -- forcecheck: ' + str(forcecheck))
|
||||
if hours > 2 or forcecheck == 'yes':
|
||||
logger.fdebug('weekinfo:' + str(weekinfo))
|
||||
mylar.PULL_REFRESH = datetime.datetime.today()
|
||||
mylar.CONFIG.PULL_REFRESH = datetime.datetime.today()
|
||||
#update the PULL_REFRESH
|
||||
mylar.config_write()
|
||||
#mylar.config_write()
|
||||
chkitout = mylar.locg.locg(weeknumber=str(weekinfo['weeknumber']),year=str(weekinfo['year']))
|
||||
|
||||
logger.fdebug('linking ComicID to Pull-list to reflect status.')
|
||||
|
@ -466,7 +470,6 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
nVal = {"Status": "Wanted"}
|
||||
myDB.upsert("future", nVal, nKey)
|
||||
return
|
||||
|
||||
if issuechk is not None:
|
||||
if issuechk['Issue_Number'] == IssueNumber or issuechk['Issue_Number'] == altissuenumber:
|
||||
og_status = issuechk['Status']
|
||||
|
@ -505,8 +508,8 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
values = {"Status": "Archived"}
|
||||
newValue['Status'] = "Archived"
|
||||
elif og_status == 'Failed':
|
||||
if mylar.FAILED_DOWNLOAD_HANDLING:
|
||||
if mylar.FAILED_AUTO:
|
||||
if mylar.CONFIG.FAILED_DOWNLOAD_HANDLING:
|
||||
if mylar.CONFIG.FAILED_AUTO:
|
||||
values = {"Status": "Wanted"}
|
||||
newValue['Status'] = "Wanted"
|
||||
else:
|
||||
|
@ -523,7 +526,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None,
|
|||
logger.fdebug('Issues do not match for some reason...weekly new issue: %s' % IssueNumber)
|
||||
return
|
||||
|
||||
if mylar.AUTOWANT_UPCOMING:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
#for issues not in db - to be added to Upcoming table.
|
||||
if og_status is None:
|
||||
newValue['Status'] = "Wanted"
|
||||
|
@ -615,7 +618,7 @@ def weekly_update(ComicName, IssueNumber, CStatus, CID, weeknumber, year, altiss
|
|||
newValue = {"STATUS": CStatus}
|
||||
|
||||
else:
|
||||
if mylar.AUTOWANT_UPCOMING:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
newValue = {"STATUS": "Wanted"}
|
||||
else:
|
||||
newValue = {"STATUS": "Skipped"}
|
||||
|
@ -630,7 +633,7 @@ def weekly_update(ComicName, IssueNumber, CStatus, CID, weeknumber, year, altiss
|
|||
|
||||
def newpullcheck(ComicName, ComicID, issue=None):
|
||||
# When adding a new comic, let's check for new issues on this week's pullist and update.
|
||||
if mylar.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
if mylar.CONFIG.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
mylar.weeklypull.pullitcheck(comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
|
||||
else:
|
||||
mylar.weeklypull.new_pullcheck(weeknumber=mylar.CURRENT_WEEKNUMBER, pullyear=mylar.CURRENT_YEAR, comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
|
||||
|
@ -663,13 +666,13 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, pro
|
|||
if IssueID is None or IssueID == 'None':
|
||||
#if IssueID is None, it's a one-off download from the pull-list.
|
||||
#give it a generic ID above the last one so it doesn't throw an error later.
|
||||
if any([mylar.HIGHCOUNT == 0, mylar.HIGHCOUNT is None]):
|
||||
mylar.HIGHCOUNT = 900000
|
||||
if any([mylar.CONFIG.HIGHCOUNT == 0, mylar.CONFIG.HIGHCOUNT is None]):
|
||||
mylar.CONFIG.HIGHCOUNT = 900000
|
||||
else:
|
||||
mylar.HIGHCOUNT+=1
|
||||
mylar.CONFIG.HIGHCOUNT+=1
|
||||
|
||||
IssueID = mylar.HIGHCOUNT
|
||||
mylar.config_write()
|
||||
IssueID = mylar.CONFIG.HIGHCOUNT
|
||||
#mylar.config_write()
|
||||
|
||||
controlValue = {"IssueID": IssueID,
|
||||
"Provider": prov}
|
||||
|
@ -932,11 +935,11 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
comiccnt = int(tmpval['comiccount'])
|
||||
#logger.fdebug(module + 'comiccnt is:' + str(comiccnt))
|
||||
fca.append(tmpval)
|
||||
if all([mylar.MULTIPLE_DEST_DIRS is not None, mylar.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation'])) != rescan['ComicLocation'], os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation'])))]):
|
||||
logger.fdebug(module + 'multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS)
|
||||
if all([mylar.CONFIG.MULTIPLE_DEST_DIRS is not None, mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None', os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation'])) != rescan['ComicLocation'], os.path.exists(os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation'])))]):
|
||||
logger.fdebug(module + 'multiple_dest_dirs:' + mylar.CONFIG.MULTIPLE_DEST_DIRS)
|
||||
logger.fdebug(module + 'dir: ' + rescan['ComicLocation'])
|
||||
logger.fdebug(module + 'os.path.basename: ' + os.path.basename(rescan['ComicLocation']))
|
||||
pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation']))
|
||||
pathdir = os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation']))
|
||||
logger.info(module + ' Now checking files for ' + rescan['ComicName'] + ' (' + str(rescan['ComicYear']) + ') in :' + pathdir)
|
||||
mvals = filechecker.FileChecker(dir=pathdir, watchcomic=rescan['ComicName'], Publisher=rescan['ComicPublisher'], AlternateSearch=altnames)
|
||||
tmpv = mvals.listFiles()
|
||||
|
@ -972,7 +975,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
#iscnt = rescan['Total']
|
||||
|
||||
havefiles = 0
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
an_cnt = myDB.select("SELECT COUNT(*) FROM annuals WHERE ComicID=?", [ComicID])
|
||||
anncnt = an_cnt[0][0]
|
||||
else:
|
||||
|
@ -1007,7 +1010,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
mc_annual = []
|
||||
mc_annualnumber = []
|
||||
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
mult_ann_check = myDB.select('SELECT * FROM annuals WHERE ComicID=? GROUP BY Int_IssueNumber HAVING (COUNT(Int_IssueNumber) > 1)', [ComicID])
|
||||
|
||||
if len(mult_ann_check) == 0:
|
||||
|
@ -1118,11 +1121,11 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
if di['fcdigit'] == fcdigit:
|
||||
#base off of config - base duplication keep on filesize or file-type (or both)
|
||||
logger.fdebug('[DUPECHECK] Duplicate issue detected [' + di['filename'] + '] [' + tmpfc['ComicFilename'] + ']')
|
||||
# mylar.DUPECONSTRAINT = 'filesize' / 'filetype-cbr' / 'filetype-cbz'
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.DUPECONSTRAINT)
|
||||
# mylar.CONFIG.DUPECONSTRAINT = 'filesize' / 'filetype-cbr' / 'filetype-cbz'
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.CONFIG.DUPECONSTRAINT)
|
||||
removedupe = False
|
||||
if 'cbr' in mylar.DUPECONSTRAINT or 'cbz' in mylar.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.CONFIG.DUPECONSTRAINT or 'cbz' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
#this has to be configured in config - either retain cbr or cbz.
|
||||
if tmpfc['ComicFilename'].endswith('.cbz'):
|
||||
#keep di['filename']
|
||||
|
@ -1133,7 +1136,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
#keep tmpfc['ComicFilename']
|
||||
logger.fdebug('[DUPECHECK-CBR PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining newly scanned in file : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
elif 'cbz' in mylar.DUPECONSTRAINT:
|
||||
elif 'cbz' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if tmpfc['ComicFilename'].endswith('.cbr'):
|
||||
#keep di['filename']
|
||||
logger.fdebug('[DUPECHECK-CBZ PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
|
@ -1144,7 +1147,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
logger.fdebug('[DUPECHECK-CBZ PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining newly scanned in filename : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
|
||||
if mylar.DUPECONSTRAINT == 'filesize':
|
||||
if mylar.CONFIG.DUPECONSTRAINT == 'filesize':
|
||||
if tmpfc['ComicSize'] <= di['filesize']:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reiss['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
issuedupe = "yes"
|
||||
|
@ -1265,11 +1268,11 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
if di['fcdigit'] == fcdigit and di['issueid'] == reann['IssueID']:
|
||||
#base off of config - base duplication keep on filesize or file-type (or both)
|
||||
logger.fdebug('[DUPECHECK] Duplicate issue detected [' + di['filename'] + '] [' + tmpfc['ComicFilename'] + ']')
|
||||
# mylar.DUPECONSTRAINT = 'filesize' / 'filetype-cbr' / 'filetype-cbz'
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.DUPECONSTRAINT)
|
||||
# mylar.CONFIG.DUPECONSTRAINT = 'filesize' / 'filetype-cbr' / 'filetype-cbz'
|
||||
logger.fdebug('[DUPECHECK] Based on duplication preferences I will retain based on : ' + mylar.CONFIG.DUPECONSTRAINT)
|
||||
removedupe = False
|
||||
if 'cbr' in mylar.DUPECONSTRAINT or 'cbz' in mylar.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.CONFIG.DUPECONSTRAINT or 'cbz' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if 'cbr' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
#this has to be configured in config - either retain cbr or cbz.
|
||||
if tmpfc['ComicFilename'].endswith('.cbz'):
|
||||
#keep di['filename']
|
||||
|
@ -1280,7 +1283,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
#keep tmpfc['ComicFilename']
|
||||
logger.fdebug('[DUPECHECK-CBR PRIORITY] [#' + reann['Issue_Number'] + '] Retaining newly scanned in file : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
elif 'cbz' in mylar.DUPECONSTRAINT:
|
||||
elif 'cbz' in mylar.CONFIG.DUPECONSTRAINT:
|
||||
if tmpfc['ComicFilename'].endswith('.cbr'):
|
||||
#keep di['filename']
|
||||
logger.fdebug('[DUPECHECK-CBZ PRIORITY] [#' + reann['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
|
@ -1291,7 +1294,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
logger.fdebug('[DUPECHECK-CBZ PRIORITY] [#' + reann['Issue_Number'] + '] Retaining newly scanned in filename : ' + tmpfc['ComicFilename'])
|
||||
removedupe = True
|
||||
|
||||
if mylar.DUPECONSTRAINT == 'filesize':
|
||||
if mylar.CONFIG.DUPECONSTRAINT == 'filesize':
|
||||
if tmpfc['ComicSize'] <= di['filesize']:
|
||||
logger.fdebug('[DUPECHECK-FILESIZE PRIORITY] [#' + reann['Issue_Number'] + '] Retaining currently scanned in filename : ' + di['filename'])
|
||||
annualdupe = "yes"
|
||||
|
@ -1349,7 +1352,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
#-- if annuals aren't enabled, this will bugger out.
|
||||
writeit = True
|
||||
try:
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
if 'annual' in temploc.lower():
|
||||
if reann is None:
|
||||
logger.fdebug(module + ' Annual present in location, but series does not have any annuals attached to it - Ignoring')
|
||||
|
@ -1412,7 +1415,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
old_status = chk['Status']
|
||||
|
||||
if old_status == "Skipped":
|
||||
if mylar.AUTOWANT_ALL:
|
||||
if mylar.CONFIG.AUTOWANT_ALL:
|
||||
issStatus = "Wanted"
|
||||
else:
|
||||
issStatus = "Skipped"
|
||||
|
@ -1467,7 +1470,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
havefiles = havefiles + (arcfiles + arcanns)
|
||||
|
||||
ignorecount = 0
|
||||
if mylar.IGNORE_HAVETOTAL: # if this is enabled, will increase Have total as if in Archived Status
|
||||
if mylar.CONFIG.IGNORE_HAVETOTAL: # if this is enabled, will increase Have total as if in Archived Status
|
||||
ignores = myDB.select("SELECT count(*) FROM issues WHERE ComicID=? AND Status='Ignored'", [ComicID])
|
||||
if int(ignores[0][0]) > 0:
|
||||
ignorecount = ignores[0][0]
|
||||
|
@ -1475,7 +1478,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
logger.fdebug(module + ' Adjusting have total to ' + str(havefiles) + ' because of this many Ignored files:' + str(ignorecount))
|
||||
|
||||
snatchedcount = 0
|
||||
if mylar.SNATCHED_HAVETOTAL: # if this is enabled, will increase Have total as if in Archived Status
|
||||
if mylar.CONFIG.SNATCHED_HAVETOTAL: # if this is enabled, will increase Have total as if in Archived Status
|
||||
snatches = myDB.select("SELECT count(*) FROM issues WHERE ComicID=? AND Status='Snatched'", [ComicID])
|
||||
if int(snatches[0][0]) > 0:
|
||||
snatchedcount = snatches[0][0]
|
||||
|
@ -1511,8 +1514,8 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
continue
|
||||
logger.fdebug('Issue exists - no need to change status.')
|
||||
else:
|
||||
if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None':
|
||||
if os.path.exists(os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation']))):
|
||||
if mylar.CONFIG.MULTIPLE_DEST_DIRS is not None and mylar.CONFIG.MULTIPLE_DEST_DIRS != 'None':
|
||||
if os.path.exists(os.path.join(mylar.CONFIG.MULTIPLE_DEST_DIRS, os.path.basename(rescan['ComicLocation']))):
|
||||
#logger.fdebug('Issue(s) currently exist and found within multiple destination directory location')
|
||||
continue
|
||||
#print "Changing status from Downloaded to Archived - cannot locate file"
|
||||
|
@ -1542,7 +1545,7 @@ def forceRescan(ComicID, archive=None, module=None):
|
|||
|
||||
myDB.upsert("comics", newValueStat, controlValueStat)
|
||||
#enforce permissions
|
||||
if mylar.ENFORCE_PERMS:
|
||||
if mylar.CONFIG.ENFORCE_PERMS:
|
||||
logger.fdebug(module + ' Ensuring permissions/ownership enforced for series: ' + rescan['ComicName'])
|
||||
filechecker.setperms(rescan['ComicLocation'])
|
||||
logger.info(module + ' I have physically found ' + str(foundcount) + ' issues, ignored ' + str(ignorecount) + ' issues, snatched ' + str(snatchedcount) + ' issues, and accounted for ' + str(totalarc) + ' in an Archived state [ Total Issue Count: ' + str(havefiles) + ' / ' + str(combined_total) + ' ]')
|
||||
|
|
|
@ -27,7 +27,7 @@ class utorrentclient(object):
|
|||
|
||||
def __init__(self):
|
||||
|
||||
host = mylar.UTORRENT_HOST #has to be in the format of URL:PORT
|
||||
host = mylar.CONFIG.UTORRENT_HOST #has to be in the format of URL:PORT
|
||||
if not host.startswith('http'):
|
||||
host = 'http://' + host
|
||||
|
||||
|
@ -38,8 +38,8 @@ class utorrentclient(object):
|
|||
host = host[:-4]
|
||||
|
||||
self.base_url = host
|
||||
self.username = mylar.UTORRENT_USERNAME
|
||||
self.password = mylar.UTORRENT_PASSWORD
|
||||
self.username = mylar.CONFIG.UTORRENT_USERNAME
|
||||
self.password = mylar.CONFIG.UTORRENT_PASSWORD
|
||||
self.utorrent_url = '%s/gui/' % (self.base_url)
|
||||
self.auth = requests.auth.HTTPBasicAuth(self.username, self.password)
|
||||
self.token, self.cookies = self._get_token()
|
||||
|
@ -82,7 +82,7 @@ class utorrentclient(object):
|
|||
if str(r.status_code) == '200':
|
||||
logger.info('Successfully added torrent to uTorrent client.')
|
||||
hash = self.calculate_torrent_hash(data=tordata)
|
||||
if mylar.UTORRENT_LABEL:
|
||||
if mylar.CONFIG.UTORRENT_LABEL:
|
||||
try:
|
||||
self.setlabel(hash)
|
||||
except:
|
||||
|
@ -104,7 +104,7 @@ class utorrentclient(object):
|
|||
if str(r.status_code) == '200':
|
||||
logger.info('Successfully added torrent to uTorrent client.')
|
||||
hash = self.calculate_torrent_hash(link=url)
|
||||
if mylar.UTORRENT_LABEL:
|
||||
if mylar.CONFIG.UTORRENT_LABEL:
|
||||
try:
|
||||
self.setlabel(hash)
|
||||
except:
|
||||
|
@ -115,10 +115,10 @@ class utorrentclient(object):
|
|||
|
||||
|
||||
def setlabel(self, hash):
|
||||
params = {'token': self.token, 'action': 'setprops', 'hash': hash, 's': 'label', 'v': str(mylar.UTORRENT_LABEL)}
|
||||
params = {'token': self.token, 'action': 'setprops', 'hash': hash, 's': 'label', 'v': str(mylar.CONFIG.UTORRENT_LABEL)}
|
||||
r = requests.post(url=self.utorrent_url, auth=self.auth, cookies=self.cookies, params=params)
|
||||
if str(r.status_code) == '200':
|
||||
logger.info('label ' + str(mylar.UTORRENT_LABEL) + ' successfully applied')
|
||||
logger.info('label ' + str(mylar.CONFIG.UTORRENT_LABEL) + ' successfully applied')
|
||||
else:
|
||||
logger.info('Unable to label torrent')
|
||||
return
|
||||
|
@ -148,7 +148,7 @@ class utorrentclient(object):
|
|||
|
||||
# not implemented yet #
|
||||
# def load_torrent(self, filepath):
|
||||
# start = bool(mylar.UTORRENT_STARTONLOAD)
|
||||
# start = bool(mylar.CONFIG.UTORRENT_STARTONLOAD)
|
||||
|
||||
# logger.info('filepath to torrent file set to : ' + filepath)
|
||||
#
|
||||
|
@ -157,9 +157,9 @@ class utorrentclient(object):
|
|||
# if not torrent:
|
||||
# return False
|
||||
|
||||
# if mylar.UTORRENT_LABEL:
|
||||
# if mylar.CONFIG.UTORRENT_LABEL:
|
||||
# self.setlabel(torrent)
|
||||
# logger.info('Setting label for torrent to : ' + mylar.UTORRENT_LABEL)
|
||||
# logger.info('Setting label for torrent to : ' + mylar.CONFIG.UTORRENT_LABEL)
|
||||
|
||||
# logger.info('Successfully loaded torrent.')
|
||||
|
||||
|
|
|
@ -26,8 +26,8 @@ import re
|
|||
|
||||
def runGit(args):
|
||||
|
||||
if mylar.GIT_PATH:
|
||||
git_locations = ['"' +mylar.GIT_PATH +'"']
|
||||
if mylar.CONFIG.GIT_PATH:
|
||||
git_locations = ['"' +mylar.CONFIG.GIT_PATH +'"']
|
||||
else:
|
||||
git_locations = ['git']
|
||||
|
||||
|
@ -42,10 +42,10 @@ def runGit(args):
|
|||
cmd = cur_git +' ' +args
|
||||
|
||||
try:
|
||||
logger.debug('Trying to execute: "' + cmd + '" with shell in ' + mylar.PROG_DIR)
|
||||
#logger.debug('Trying to execute: "' + cmd + '" with shell in ' + mylar.PROG_DIR)
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=mylar.PROG_DIR)
|
||||
output, err = p.communicate()
|
||||
logger.debug('Git output: ' + output)
|
||||
#logger.debug('Git output: ' + output)
|
||||
except OSError:
|
||||
logger.debug('Command ' + cmd + ' didn\'t work, couldn\'t find git')
|
||||
continue
|
||||
|
@ -63,7 +63,7 @@ def runGit(args):
|
|||
|
||||
def getVersion():
|
||||
|
||||
if mylar.GIT_BRANCH is not None and mylar.GIT_BRANCH.startswith('win32build'):
|
||||
if mylar.CONFIG.GIT_BRANCH is not None and mylar.CONFIG.GIT_BRANCH.startswith('win32build'):
|
||||
|
||||
mylar.INSTALL_TYPE = 'win'
|
||||
|
||||
|
@ -91,8 +91,8 @@ def getVersion():
|
|||
logger.error('Output does not look like a hash, not using it')
|
||||
cur_commit_hash = None
|
||||
|
||||
if mylar.GIT_BRANCH:
|
||||
branch = mylar.GIT_BRANCH
|
||||
if mylar.CONFIG.GIT_BRANCH:
|
||||
branch = mylar.CONFIG.GIT_BRANCH
|
||||
|
||||
else:
|
||||
branch = None
|
||||
|
@ -102,11 +102,10 @@ def getVersion():
|
|||
if '*' in line:
|
||||
branch = re.sub('[\*\n]','',line).strip()
|
||||
break
|
||||
|
||||
|
||||
if not branch and mylar.GIT_BRANCH:
|
||||
logger.warn('Unable to retrieve branch name [' + branch + '] from git. Setting branch to configuration value of : ' + mylar.GIT_BRANCH)
|
||||
branch = mylar.GIT_BRANCH
|
||||
if not branch and mylar.CONFIG.GIT_BRANCH:
|
||||
logger.warn('Unable to retrieve branch name [' + branch + '] from git. Setting branch to configuration value of : ' + mylar.CONFIG.GIT_BRANCH)
|
||||
branch = mylar.CONFIG.GIT_BRANCH
|
||||
if not branch:
|
||||
logger.warn('Could not retrieve branch name [' + branch + '] form git. Defaulting to Master.')
|
||||
branch = 'master'
|
||||
|
@ -128,9 +127,9 @@ def getVersion():
|
|||
current_version = f.read().strip(' \n\r')
|
||||
|
||||
if current_version:
|
||||
if mylar.GIT_BRANCH:
|
||||
logger.info('Branch detected & set to : ' + mylar.GIT_BRANCH)
|
||||
return current_version, mylar.GIT_BRANCH
|
||||
if mylar.CONFIG.GIT_BRANCH:
|
||||
logger.info('Branch detected & set to : ' + mylar.CONFIG.GIT_BRANCH)
|
||||
return current_version, mylar.CONFIG.GIT_BRANCH
|
||||
else:
|
||||
logger.warn('No branch specified within config - will attempt to poll version from mylar')
|
||||
try:
|
||||
|
@ -141,9 +140,9 @@ def getVersion():
|
|||
logger.info('Unable to detect branch properly - set branch in config.ini, currently defaulting to : ' + branch)
|
||||
return current_version, branch
|
||||
else:
|
||||
if mylar.GIT_BRANCH:
|
||||
logger.info('Branch detected & set to : ' + mylar.GIT_BRANCH)
|
||||
return current_version, mylar.GIT_BRANCH
|
||||
if mylar.CONFIG.GIT_BRANCH:
|
||||
logger.info('Branch detected & set to : ' + mylar.CONFIG.GIT_BRANCH)
|
||||
return current_version, mylar.CONFIG.GIT_BRANCH
|
||||
else:
|
||||
logger.warn('No branch specified within config - will attempt to poll version from mylar')
|
||||
try:
|
||||
|
@ -159,7 +158,7 @@ def getVersion():
|
|||
def checkGithub():
|
||||
|
||||
# Get the latest commit available from github
|
||||
url = 'https://api.github.com/repos/%s/mylar/commits/%s' % (mylar.GIT_USER, mylar.GIT_BRANCH)
|
||||
url = 'https://api.github.com/repos/%s/mylar/commits/%s' % (mylar.CONFIG.GIT_USER, mylar.CONFIG.GIT_BRANCH)
|
||||
logger.info ('Retrieving latest version information from github')
|
||||
try:
|
||||
response = requests.get(url, verify=True)
|
||||
|
@ -173,7 +172,7 @@ def checkGithub():
|
|||
# See how many commits behind we are
|
||||
if mylar.CURRENT_VERSION:
|
||||
logger.fdebug('Comparing currently installed version [' + mylar.CURRENT_VERSION + '] with latest github version [' + mylar.LATEST_VERSION +']')
|
||||
url = 'https://api.github.com/repos/%s/mylar/compare/%s...%s' % (mylar.GIT_USER, mylar.CURRENT_VERSION, mylar.LATEST_VERSION)
|
||||
url = 'https://api.github.com/repos/%s/mylar/compare/%s...%s' % (mylar.CONFIG.GIT_USER, mylar.CURRENT_VERSION, mylar.LATEST_VERSION)
|
||||
|
||||
try:
|
||||
response = requests.get(url, verify=True)
|
||||
|
@ -207,23 +206,23 @@ def update():
|
|||
|
||||
elif mylar.INSTALL_TYPE == 'git':
|
||||
|
||||
output, err = runGit('pull origin ' + mylar.GIT_BRANCH)
|
||||
output, err = runGit('pull origin ' + mylar.CONFIG.GIT_BRANCH)
|
||||
|
||||
if not output:
|
||||
logger.error('Couldn\'t download latest version')
|
||||
|
||||
for line in output.split('\n'):
|
||||
|
||||
|
||||
if 'Already up-to-date.' in line:
|
||||
logger.info('No update available, not updating')
|
||||
logger.info('Output: ' + str(output))
|
||||
elif line.endswith('Aborting.'):
|
||||
logger.error('Unable to update from git: ' +line)
|
||||
logger.info('Output: ' + str(output))
|
||||
|
||||
|
||||
else:
|
||||
|
||||
tar_download_url = 'https://github.com/%s/mylar/tarball/%s' % (mylar.GIT_USER, mylar.GIT_BRANCH)
|
||||
tar_download_url = 'https://github.com/%s/mylar/tarball/%s' % (mylar.CONFIG.GIT_USER, mylar.CONFIG.GIT_BRANCH)
|
||||
update_dir = os.path.join(mylar.PROG_DIR, 'update')
|
||||
version_path = os.path.join(mylar.PROG_DIR, 'version.txt')
|
||||
|
||||
|
@ -235,7 +234,7 @@ def update():
|
|||
return
|
||||
|
||||
#try sanitizing the name here...
|
||||
download_name = mylar.GIT_BRANCH + '-github' #data.geturl().split('/')[-1].split('?')[0]
|
||||
download_name = mylar.CONFIG.GIT_BRANCH + '-github' #data.geturl().split('/')[-1].split('?')[0]
|
||||
tar_download_path = os.path.join(mylar.PROG_DIR, download_name)
|
||||
|
||||
# Save tar to disk
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -111,7 +111,7 @@ def initialize(options):
|
|||
},
|
||||
'/cache': {
|
||||
'tools.staticdir.on': True,
|
||||
'tools.staticdir.dir': mylar.CACHE_DIR,
|
||||
'tools.staticdir.dir': mylar.CONFIG.CACHE_DIR,
|
||||
'tools.auth_basic.on': False
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,14 +54,14 @@ def pullit(forcecheck=None):
|
|||
if pulldate is None: pulldate = '00000000'
|
||||
|
||||
#only for pw-file or ALT_PULL = 1
|
||||
newrl = os.path.join(mylar.CACHE_DIR, 'newreleases.txt')
|
||||
newrl = os.path.join(mylar.CONFIG.CACHE_DIR, 'newreleases.txt')
|
||||
mylar.PULLBYFILE = False
|
||||
|
||||
if mylar.ALT_PULL == 1:
|
||||
if mylar.CONFIG.ALT_PULL == 1:
|
||||
#logger.info('[PULL-LIST] The Alt-Pull method is currently broken. Defaulting back to the normal method of grabbing the pull-list.')
|
||||
logger.info('[PULL-LIST] Populating & Loading pull-list data directly from webpage')
|
||||
newpull.newpull()
|
||||
elif mylar.ALT_PULL == 2:
|
||||
elif mylar.CONFIG.ALT_PULL == 2:
|
||||
logger.info('[PULL-LIST] Populating & Loading pull-list data directly from alternate website')
|
||||
chk_locg = locg.locg('00000000') #setting this to 00000000 will do a Recreate on every call instead of a Refresh
|
||||
if chk_locg['status'] == 'up2date':
|
||||
|
@ -85,8 +85,8 @@ def pullit(forcecheck=None):
|
|||
#newtxtfile header info ("SHIPDATE\tPUBLISHER\tISSUE\tCOMIC\tEXTRA\tSTATUS\n")
|
||||
#STATUS denotes default status to be applied to pulllist in Mylar (default = Skipped)
|
||||
|
||||
if mylar.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
newfl = os.path.join(mylar.CACHE_DIR, 'Clean-newreleases.txt')
|
||||
if mylar.CONFIG.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
newfl = os.path.join(mylar.CONFIG.CACHE_DIR, 'Clean-newreleases.txt')
|
||||
|
||||
newtxtfile = open(newfl, 'wb')
|
||||
|
||||
|
@ -381,7 +381,7 @@ def pullit(forcecheck=None):
|
|||
dupefound = "no"
|
||||
|
||||
#-- remove html tags when alt_pull is enabled
|
||||
if mylar.ALT_PULL == 1:
|
||||
if mylar.CONFIG.ALT_PULL == 1:
|
||||
if '&' in comicnm:
|
||||
comicnm = re.sub('&', '&', comicnm).strip()
|
||||
if '&' in pub:
|
||||
|
@ -418,7 +418,7 @@ def pullit(forcecheck=None):
|
|||
|
||||
newtxtfile.close()
|
||||
|
||||
if all([pulldate == '00000000', mylar.ALT_PULL != 2]) or mylar.PULLBYFILE is True:
|
||||
if all([pulldate == '00000000', mylar.CONFIG.ALT_PULL != 2]) or mylar.PULLBYFILE is True:
|
||||
pulldate = shipdate
|
||||
|
||||
try:
|
||||
|
@ -461,12 +461,12 @@ def pullit(forcecheck=None):
|
|||
t+=1
|
||||
csvfile.close()
|
||||
#let's delete the files
|
||||
os.remove(os.path.join(mylar.CACHE_DIR, 'Clean-newreleases.txt'))
|
||||
os.remove(os.path.join(mylar.CACHE_DIR, 'newreleases.txt'))
|
||||
os.remove(os.path.join(mylar.CONFIG.CACHE_DIR, 'Clean-newreleases.txt'))
|
||||
os.remove(os.path.join(mylar.CONFIG.CACHE_DIR, 'newreleases.txt'))
|
||||
|
||||
logger.info(u"Weekly Pull List successfully loaded.")
|
||||
|
||||
if mylar.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
if mylar.CONFIG.ALT_PULL != 2 or mylar.PULLBYFILE is True:
|
||||
pullitcheck(forcecheck=forcecheck)
|
||||
|
||||
def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurepull=None, issue=None):
|
||||
|
@ -626,7 +626,7 @@ def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurep
|
|||
if w > 0:
|
||||
while (cnt > -1):
|
||||
latestiss = latestissue[cnt]
|
||||
if mylar.ALT_PULL != 2:
|
||||
if mylar.CONFIG.ALT_PULL != 2:
|
||||
lines[cnt] = lines[cnt].upper()
|
||||
#llen[cnt] = str(llen[cnt])
|
||||
logger.fdebug("looking for : " + lines[cnt])
|
||||
|
@ -665,7 +665,7 @@ def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurep
|
|||
logger.fdebug("comicnm : " + comicnm) # / mod :" + str(modcomicnm))
|
||||
|
||||
if dyn_comicnm == dyn_watchnm:
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
if 'annual' in watchcomic.lower() and 'annual' not in comicnm.lower():
|
||||
logger.fdebug('Annual detected in issue, but annuals are not enabled and no series match in wachlist.')
|
||||
continue
|
||||
|
@ -781,7 +781,7 @@ def pullitcheck(comic1off_name=None, comic1off_id=None, forcecheck=None, futurep
|
|||
watchfnd.append(comicnm)
|
||||
watchfndiss.append(week['ISSUE'])
|
||||
ComicID = comicid[cnt]
|
||||
if not mylar.CV_ONLY:
|
||||
if not mylar.CONFIG.CV_ONLY:
|
||||
ComicIssue = str(watchfndiss[tot -1] + ".00")
|
||||
else:
|
||||
ComicIssue = str(watchfndiss[tot -1])
|
||||
|
@ -937,7 +937,7 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
|
|||
break
|
||||
#logger.info('looking for ' + week['ComicName'] + ' [' + week['comicid'] + ']')
|
||||
idmatch = [x for x in weeklylist if week['comicid'] is not None and int(x['ComicID']) == int(week['comicid'])]
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
annualidmatch = [x for x in weeklylist if week['comicid'] is not None and ([xa for xa in x['AnnualIDs'] if int(xa['ComicID']) == int(week['comicid'])])]
|
||||
#The above will auto-match against ComicID if it's populated on the pullsite, otherwise do name-matching.
|
||||
namematch = [ab for ab in weeklylist if ab['DynamicName'] == week['dynamicname']]
|
||||
|
@ -954,7 +954,7 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
|
|||
elif annualidmatch:
|
||||
comicname = annualidmatch[0]['AnnualIDs'][0]['ComicName'].strip()
|
||||
latestiss = annualidmatch[0]['latestIssue'].strip()
|
||||
if mylar.ANNUALS_ON:
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
comicid = annualidmatch[0]['ComicID'].strip()
|
||||
else:
|
||||
comicid = annualidmatch[0]['AnnualIDs'][0]['ComicID'].strip()
|
||||
|
@ -1104,7 +1104,7 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
|
|||
cst = cstatus
|
||||
newValue['Status'] = cst
|
||||
else:
|
||||
if mylar.AUTOWANT_UPCOMING:
|
||||
if mylar.CONFIG.AUTOWANT_UPCOMING:
|
||||
newValue['Status'] = 'Wanted'
|
||||
else:
|
||||
newValue['Status'] = 'Skipped'
|
||||
|
@ -1141,7 +1141,7 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
|
|||
#make sure the status is Wanted and that the issue status is identical if not.
|
||||
newStat = {'Status': 'Wanted'}
|
||||
ctrlStat = {'IssueID': issueid}
|
||||
if all([annualidmatch, mylar.ANNUALS_ON]):
|
||||
if all([annualidmatch, mylar.CONFIG.ANNUALS_ON]):
|
||||
myDB.upsert("annuals", newStat, ctrlStat)
|
||||
else:
|
||||
myDB.upsert("issues", newStat, ctrlStat)
|
||||
|
@ -1272,22 +1272,22 @@ def weekly_check(comicid, issuenum, file=None, path=None, module=None, issueid=N
|
|||
|
||||
weekinfo = helpers.weekly_info(chkit['weeknumber'],chkit['year'])
|
||||
|
||||
if mylar.WEEKFOLDER:
|
||||
if mylar.CONFIG.WEEKFOLDER:
|
||||
weekly_singlecopy(comicid, issuenum, file, path, weekinfo)
|
||||
if mylar.SEND2READ:
|
||||
if mylar.CONFIG.SEND2READ:
|
||||
send2read(comicid, issueid, issuenum)
|
||||
return
|
||||
|
||||
def weekly_singlecopy(comicid, issuenum, file, path, weekinfo):
|
||||
|
||||
module = '[WEEKLY-PULL COPY]'
|
||||
if mylar.WEEKFOLDER:
|
||||
if mylar.WEEKFOLDER_LOC:
|
||||
weekdst = mylar.WEEKFOLDER_LOC
|
||||
if mylar.CONFIG.WEEKFOLDER:
|
||||
if mylar.CONFIG.WEEKFOLDER_LOC:
|
||||
weekdst = mylar.CONFIG.WEEKFOLDER_LOC
|
||||
else:
|
||||
weekdst = mylar.DESTINATION_DIR
|
||||
weekdst = mylar.CONFIG.DESTINATION_DIR
|
||||
|
||||
if mylar.WEEKFOLDER_FORMAT == 0:
|
||||
if mylar.CONFIG.WEEKFOLDER_FORMAT == 0:
|
||||
desdir = os.path.join(weekdst, str( str(weekinfo['year']) + '-' + str(weekinfo['weeknumber']) ))
|
||||
else:
|
||||
desdir = os.path.join(weekdst, str( str(weekinfo['midweek']) ))
|
||||
|
@ -1296,10 +1296,10 @@ def weekly_singlecopy(comicid, issuenum, file, path, weekinfo):
|
|||
if dircheck:
|
||||
pass
|
||||
else:
|
||||
desdir = mylar.DESTINATION_DIR
|
||||
desdir = mylar.CONFIG.DESTINATION_DIR
|
||||
|
||||
else:
|
||||
desdir = mylar.GRABBAG_DIR
|
||||
desdir = mylar.CONFIG.GRABBAG_DIR
|
||||
|
||||
desfile = os.path.join(desdir, file)
|
||||
srcfile = os.path.join(path)
|
||||
|
@ -1316,7 +1316,7 @@ def weekly_singlecopy(comicid, issuenum, file, path, weekinfo):
|
|||
def send2read(comicid, issueid, issuenum):
|
||||
|
||||
module = '[READLIST]'
|
||||
if mylar.SEND2READ:
|
||||
if mylar.CONFIG.SEND2READ:
|
||||
logger.info(module + " Send to Reading List enabled for new pulls. Adding to your readlist in the status of 'Added'")
|
||||
if issueid is None:
|
||||
chkthis = myDB.selectone('SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?', [comicid, helpers.issuedigits(issuenum)]).fetchone()
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
##-- start configuration
|
||||
|
||||
#this needs to be edited to the full path to the get.conf file containing the torrent client information
|
||||
configfile='/opt/mylar/post-processing/torrent-auto-snatch/get.conf'
|
||||
configfile='/home/hero/mylar/post-processing/torrent-auto-snatch/get.conf'
|
||||
|
||||
#this is the temporary location where it will make sure the conf is safe for use (by default this should be fine if left alone)
|
||||
configfile_secured='/tmp/get.conf'
|
||||
|
|
Loading…
Reference in New Issue