FIX: Corrected issue where an arc was being set to None when initially adding an arc, IMP: Changed arc searches & one-off pull-list searches to both use the search-queue, and other queues as required, IMP: Changed 'search for missing' in the arc options to follow search-queue workflow, IMP: Allowed arcs / pull-list one-offs to be searched based on one-shot inclusion (ie. no issue number for one-shot issues), IMP: formatted logging lines for arcs to avoid logging errors, IMP: added code to allow requeuing of ddl downloads that fail to resume from point of failure, IMP: added code to display current percentage of ddl progress, IMP: added 'clear status' option to arc details to clear current status of a given issue within an arc, FIX: allow for series within an arc that don't populate a year properly to still have a year designated, IMP: Removed SSL tick box from rtorrent GUI configuration. If previously enabled, will auto-correct host to use https upon initial startup if required,

IMP: When adding a series, if there's only one issue available (and it's not ongoing) - mark it as one-shot edition to allow for snatches with no issue number,
IMP: Ignore Week+/Week packs for the time being when using the DDL provider option, FIX: When the weekly pull could not be retrieved, it would drop to the alt_pull=0 option temporarily. If the config was saved, this would overwrite the current alt_pull setting and cause subsequent problems when retrieving the pull, FIX: Fixed some post-processing problems when post-processing story-arc issues
This commit is contained in:
evilhero 2019-02-19 12:02:05 -05:00
parent 47477eea26
commit e0f62ba173
12 changed files with 456 additions and 290 deletions

View File

@ -584,14 +584,12 @@
<div class="row"> <div class="row">
<label>rTorrent Host:port(optional)</label> <label>rTorrent Host:port(optional)</label>
<input type="text" id="rtorrent_host" name="rtorrent_host" value="${config['rtorrent_host']}" size="30"> <input type="text" id="rtorrent_host" name="rtorrent_host" value="${config['rtorrent_host']}" size="30">
<small>ie. my.rtorrent:80, 192.168.1.1, scgi://localhost:5000</small> <small>ie. https://my.rtorrent, http://192.168.1.1, http://localhost:80, scgi://localhost:5000</small>
</div>
<div class="row checkbox left clearfix">
<input id="rtorrent_ssl" type="checkbox" onclick="initConfigCheckbox($this));" name="rtorrent_ssl" value="1" ${config['rtorrent_ssl']} /><label>SSL</label>
</div> </div>
<div class="config"> <div class="config">
<div class="row checkbox left clearfix"> <div class="row checkbox left clearfix">
<input id="rtorrent_verify" type="checkbox" name="rtorrent_verify" value="1" ${config['rtorrent_verify']} /><label>Verify SSL</label> <input id="rtorrent_verify" type="checkbox" name="rtorrent_verify" value="1" ${config['rtorrent_verify']} /><label>Verify SSL</label>
<small>(only optionally used for https connections)</small>
</div> </div>
</div> </div>
<div class="row"> <div class="row">
@ -2200,10 +2198,9 @@
var password = document.getElementById("rtorrent_password").value; var password = document.getElementById("rtorrent_password").value;
var auth = document.getElementById("rtorrent_authentication").value; var auth = document.getElementById("rtorrent_authentication").value;
var verify = document.getElementById("rtorrent_verify").value; var verify = document.getElementById("rtorrent_verify").value;
var ssl = document.getElementById("rtorrent_ssl").value;
var rpc_url = document.getElementById("rtorrent_rpc_url").value; var rpc_url = document.getElementById("rtorrent_rpc_url").value;
$.get("testrtorrent", $.get("testrtorrent",
{ host: host, username: username, password: password, auth: auth, verify: verify, ssl: ssl, rpc_url: rpc_url }, { host: host, username: username, password: password, auth: auth, verify: verify, rpc_url: rpc_url },
function(data){ function(data){
if (data.error != undefined) { if (data.error != undefined) {
alert(data.error); alert(data.error);
@ -2480,7 +2477,6 @@
initConfigCheckbox("#enforce_perms"); initConfigCheckbox("#enforce_perms");
initConfigCheckbox("#enable_api"); initConfigCheckbox("#enable_api");
initConfigCheckbox("#sab_to_mylar"); initConfigCheckbox("#sab_to_mylar");
initConfigCheckbox("#rtorrent_ssl");
initConfigCheckbox("#usenewznab"); initConfigCheckbox("#usenewznab");
initConfigCheckbox("#enable_torznab"); initConfigCheckbox("#enable_torznab");
initConfigCheckbox("#usenzbsu"); initConfigCheckbox("#usenzbsu");

View File

@ -208,6 +208,7 @@
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a> <a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a>
%elif item['Status'] == 'Snatched': %elif item['Status'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a> <a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a>
<a href="#" onclick="doAjaxCall('clear_arcstatus?issuearcid=${item['IssueArcID']}',$(this),'table')"><data success="Clearing status of ${item['Status']} for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Clear Status</a>
%elif item['Status'] == 'Downloaded' and item['Location'] is not None: %elif item['Status'] == 'Downloaded' and item['Location'] is not None:
<a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a> <a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a>
%endif %endif

View File

@ -202,6 +202,7 @@
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a> <a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a>
%elif item['Status'] == 'Snatched': %elif item['Status'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a> <a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a>
<a href="#" onclick="doAjaxCall('clear_arcstatus?issuearcid=${item['IssueArcID']}',$(this),'table')"><data success="Clearing status of ${item['Status']} for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Clear Status</a>
%elif item['Status'] == 'Downloaded' and item['Location'] is not None: %elif item['Status'] == 'Downloaded' and item['Location'] is not None:
<a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a> <a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a>
%endif %endif

View File

@ -143,7 +143,7 @@
<td class="options"> <td class="options">
%if weekly['HAVEIT'] == 'OneOff': %if weekly['HAVEIT'] == 'OneOff':
%if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded': %if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off"> <a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off">
%if mylar.CONFIG.SHOW_ICONS: %if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" /> <img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" />
%else: %else:
@ -193,7 +193,7 @@
<% dl = False %> <% dl = False %>
%endif %endif
%if weekly['HAVEIT'] == 'No' and weekly['STATUS'] == 'Skipped': %if weekly['HAVEIT'] == 'No' and weekly['STATUS'] == 'Skipped':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="One off download"> <a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="One off download">
%if mylar.CONFIG.SHOW_ICONS: %if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/search.png" height="25" width="25" class="highqual" /> <img style="margin: 0px 5px" src="interfaces/default/images/search.png" height="25" width="25" class="highqual" />
%else: %else:
@ -233,7 +233,7 @@
%endif %endif
</a> </a>
%elif weekly['STATUS'] == 'Snatched': %elif weekly['STATUS'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again"> <a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again">
%if mylar.CONFIG.SHOW_ICONS: %if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" /> <img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" />
%else: %else:

View File

@ -1436,55 +1436,60 @@ class PostProcessor(object):
if (all([self.nzb_name != 'Manual Run', self.apicall is False]) or (self.oneoffinlist is True or all([self.issuearcid is not None, self.issueid is None]))) and not self.nzb_name.startswith('0-Day'): # and all([self.issueid is None, self.comicid is None, self.apicall is False]): if (all([self.nzb_name != 'Manual Run', self.apicall is False]) or (self.oneoffinlist is True or all([self.issuearcid is not None, self.issueid is None]))) and not self.nzb_name.startswith('0-Day'): # and all([self.issueid is None, self.comicid is None, self.apicall is False]):
ppinfo = [] ppinfo = []
if self.oneoffinlist is False: if self.oneoffinlist is False:
nzbname = self.nzb_name
#remove extensions from nzb_name if they somehow got through (Experimental most likely)
if nzbname.lower().endswith(self.extensions):
fd, ext = os.path.splitext(nzbname)
self._log("Removed extension from nzb: " + ext)
nzbname = re.sub(str(ext), '', str(nzbname))
#replace spaces
# let's change all space to decimals for simplicity
logger.fdebug('[NZBNAME]: ' + nzbname)
#gotta replace & or escape it
nzbname = re.sub("\&", 'and', nzbname)
nzbname = re.sub('[\,\:\?\'\+]', '', nzbname)
nzbname = re.sub('[\(\)]', ' ', nzbname)
logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname)
nzbname = re.sub('.cbr', '', nzbname).strip()
nzbname = re.sub('.cbz', '', nzbname).strip()
nzbname = re.sub('[\.\_]', ' ', nzbname).strip()
nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces.
logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname)
nzbname = re.sub('\s', '.', nzbname)
logger.fdebug('%s After conversions, nzbname is : %s' % (module, nzbname))
# if mylar.USE_NZBGET==1:
# nzbname=self.nzb_name
self._log("nzbname: %s" % nzbname)
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
self.oneoff = False self.oneoff = False
if nzbiss is None: if any([self.issueid is not None, self.issuearcid is not None]):
self._log("Failure - could not initially locate nzbfile in my database to rename.") if self.issueid is not None:
logger.fdebug('%s Failure - could not locate nzbfile initially' % module) s_id = self.issueid
# if failed on spaces, change it all to decimals and try again.
nzbname = re.sub('[\(\)]', '', str(nzbname))
self._log("trying again with this nzbname: %s" % nzbname)
logger.fdebug('%s Trying to locate nzbfile again with nzbname of : %s' % (module, nzbname))
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
logger.error('%s Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.' % module)
#set it up to run manual post-processing on self.nzb_folder
self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.')
self.valreturn.append({"self.log": self.log,
"mode": 'outside'})
return self.queue.put(self.valreturn)
else: else:
self._log("I corrected and found the nzb as : %s" % nzbname) s_id = self.issuearcid
logger.fdebug('%s Auto-corrected and found the nzb as : %s' % (module, nzbname)) nzbiss = myDB.selectone('SELECT * FROM nzblog WHERE IssueID=?', [s_id]).fetchone()
#issueid = nzbiss['IssueID'] else:
nzbname = self.nzb_name
#remove extensions from nzb_name if they somehow got through (Experimental most likely)
if nzbname.lower().endswith(self.extensions):
fd, ext = os.path.splitext(nzbname)
self._log("Removed extension from nzb: " + ext)
nzbname = re.sub(str(ext), '', str(nzbname))
#replace spaces
# let's change all space to decimals for simplicity
logger.fdebug('[NZBNAME]: ' + nzbname)
#gotta replace & or escape it
nzbname = re.sub("\&", 'and', nzbname)
nzbname = re.sub('[\,\:\?\'\+]', '', nzbname)
nzbname = re.sub('[\(\)]', ' ', nzbname)
logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname)
nzbname = re.sub('.cbr', '', nzbname).strip()
nzbname = re.sub('.cbz', '', nzbname).strip()
nzbname = re.sub('[\.\_]', ' ', nzbname).strip()
nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces.
logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname)
nzbname = re.sub('\s', '.', nzbname)
logger.fdebug('%s After conversions, nzbname is : %s' % (module, nzbname))
self._log("nzbname: %s" % nzbname)
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
self._log("Failure - could not initially locate nzbfile in my database to rename.")
logger.fdebug('%s Failure - could not locate nzbfile initially' % module)
# if failed on spaces, change it all to decimals and try again.
nzbname = re.sub('[\(\)]', '', str(nzbname))
self._log("trying again with this nzbname: %s" % nzbname)
logger.fdebug('%s Trying to locate nzbfile again with nzbname of : %s' % (module, nzbname))
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
logger.error('%s Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.' % module)
#set it up to run manual post-processing on self.nzb_folder
self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.')
self.valreturn.append({"self.log": self.log,
"mode": 'outside'})
return self.queue.put(self.valreturn)
else:
self._log("I corrected and found the nzb as : %s" % nzbname)
logger.fdebug('%s Auto-corrected and found the nzb as : %s' % (module, nzbname))
#issueid = nzbiss['IssueID']
issueid = nzbiss['IssueID'] issueid = nzbiss['IssueID']
logger.fdebug('%s Issueid: %s' % (module, issueid)) logger.fdebug('%s Issueid: %s' % (module, issueid))
@ -1940,7 +1945,8 @@ class PostProcessor(object):
'ComicName': tinfo['comicname'], 'ComicName': tinfo['comicname'],
'IssueNumber': tinfo['issuenumber'], 'IssueNumber': tinfo['issuenumber'],
'Publisher': tinfo['publisher'], 'Publisher': tinfo['publisher'],
'OneOff': tinfo['oneoff']} 'OneOff': tinfo['oneoff'],
'ForcedMatch': False}
else: else:

View File

@ -382,7 +382,7 @@ class Config(object):
count = sum(1 for line in open(self._config_file)) count = sum(1 for line in open(self._config_file))
else: else:
count = 0 count = 0
self.newconfig = 8 self.newconfig = 9
if count == 0: if count == 0:
CONFIG_VERSION = 0 CONFIG_VERSION = 0
MINIMALINI = False MINIMALINI = False
@ -502,13 +502,12 @@ class Config(object):
shutil.move(self._config_file, os.path.join(mylar.DATA_DIR, 'config.ini.backup')) shutil.move(self._config_file, os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
except: except:
print('Unable to make proper backup of config file in %s' % os.path.join(mylar.DATA_DIR, 'config.ini.backup')) print('Unable to make proper backup of config file in %s' % os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
if self.newconfig == 8: if self.newconfig == 9:
print('Attempting to update configuration..') print('Attempting to update configuration..')
#torznab multiple entries merged into extra_torznabs value #torznab multiple entries merged into extra_torznabs value
self.config_update() self.config_update()
setattr(self, 'CONFIG_VERSION', str(self.newconfig)) setattr(self, 'CONFIG_VERSION', str(self.newconfig))
config.set('General', 'CONFIG_VERSION', str(self.newconfig)) config.set('General', 'CONFIG_VERSION', str(self.newconfig))
print('Updating config to newest version : %s' % self.newconfig)
self.writeconfig() self.writeconfig()
else: else:
self.provider_sequence() self.provider_sequence()
@ -537,8 +536,8 @@ class Config(object):
return self return self
def config_update(self): def config_update(self):
if self.newconfig == 8: print('Updating Configuration from %s to %s' % (self.CONFIG_VERSION, self.newconfig))
print('Updating Configuration from %s to %s' % (self.CONFIG_VERSION, self.newconfig)) if self.CONFIG_VERSION < 8:
print('Checking for existing torznab configuration...') print('Checking for existing torznab configuration...')
if not any([self.TORZNAB_NAME is None, self.TORZNAB_HOST is None, self.TORZNAB_APIKEY is None, self.TORZNAB_CATEGORY is None]): if not any([self.TORZNAB_NAME is None, self.TORZNAB_HOST is None, self.TORZNAB_APIKEY is None, self.TORZNAB_CATEGORY is None]):
torznabs =[(self.TORZNAB_NAME, self.TORZNAB_HOST, self.TORZNAB_APIKEY, self.TORZNAB_CATEGORY, str(int(self.ENABLE_TORZNAB)))] torznabs =[(self.TORZNAB_NAME, self.TORZNAB_HOST, self.TORZNAB_APIKEY, self.TORZNAB_CATEGORY, str(int(self.ENABLE_TORZNAB)))]
@ -552,7 +551,14 @@ class Config(object):
config.remove_option('Torznab', 'torznab_apikey') config.remove_option('Torznab', 'torznab_apikey')
config.remove_option('Torznab', 'torznab_category') config.remove_option('Torznab', 'torznab_category')
config.remove_option('Torznab', 'torznab_verify') config.remove_option('Torznab', 'torznab_verify')
print('Successfully removed old entries.') print('Successfully removed outdated config entries.')
if self.newconfig == 9:
#rejig rtorrent settings due to change.
if all([self.RTORRENT_SSL is True, not self.RTORRENT_HOST.startswith('http')]):
self.RTORRENT_HOST = 'https://' + self.RTORRENT_HOST
config.set('Rtorrent', 'rtorrent_host', self.RTORRENT_HOST)
config.remove_option('Rtorrent', 'rtorrent_ssl')
print('Successfully removed oudated config entries.')
print('Configuration upgraded to version %s' % self.newconfig) print('Configuration upgraded to version %s' % self.newconfig)
def check_section(self, section, key): def check_section(self, section, key):
@ -731,6 +737,11 @@ class Config(object):
def configure(self, update=False): def configure(self, update=False):
#force alt_pull = 2 on restarts regardless of settings
if self.ALT_PULL != 2:
self.ALT_PULL = 2
config.set('Weekly', 'alt_pull', str(self.ALT_PULL))
try: try:
if not any([self.SAB_HOST is None, self.SAB_HOST == '', 'http://' in self.SAB_HOST[:7], 'https://' in self.SAB_HOST[:8]]): if not any([self.SAB_HOST is None, self.SAB_HOST == '', 'http://' in self.SAB_HOST[:7], 'https://' in self.SAB_HOST[:8]]):
self.SAB_HOST = 'http://' + self.SAB_HOST self.SAB_HOST = 'http://' + self.SAB_HOST

View File

@ -116,6 +116,9 @@ class GC(object):
title = re.sub(issues, '', title).strip() title = re.sub(issues, '', title).strip()
if title.endswith('#'): if title.endswith('#'):
title = title[:-1].strip() title = title[:-1].strip()
else:
if any(['Marvel Week+' in title, 'INDIE Week+' in title, 'Image Week' in title, 'DC Week+' in title]):
continue
option_find = f.find("p", {"style": "text-align: center;"}) option_find = f.find("p", {"style": "text-align: center;"})
i = 0 i = 0
@ -156,7 +159,6 @@ class GC(object):
logger.fdebug('%s [%s]' % (title, size)) logger.fdebug('%s [%s]' % (title, size))
results['entries'] = resultlist results['entries'] = resultlist
return results return results
def parse_downloadresults(self, id, mainlink): def parse_downloadresults(self, id, mainlink):
@ -236,13 +238,15 @@ class GC(object):
logger.fdebug('[%s] %s - %s' % (x['site'], x['volume'], x['link'])) logger.fdebug('[%s] %s - %s' % (x['site'], x['volume'], x['link']))
ctrlval = {'id': id} ctrlval = {'id': id}
vals = {'series': series, vals = {'series': series,
'year': year, 'year': year,
'size': size, 'size': size,
'issueid': self.issueid, 'issueid': self.issueid,
'comicid': self.comicid, 'comicid': self.comicid,
'link': link, 'link': link,
'status': 'Queued'} 'mainlink': mainlink,
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),
'status': 'Queued'}
myDB.upsert('ddl_info', vals, ctrlval) myDB.upsert('ddl_info', vals, ctrlval)
mylar.DDL_QUEUE.put({'link': link, mylar.DDL_QUEUE.put({'link': link,
@ -252,11 +256,12 @@ class GC(object):
'size': size, 'size': size,
'comicid': self.comicid, 'comicid': self.comicid,
'issueid': self.issueid, 'issueid': self.issueid,
'id': id}) 'id': id,
'resume': None})
return {'success': True} return {'success': True}
def downloadit(self, id, link, mainlink): def downloadit(self, id, link, mainlink, resume=None):
if mylar.DDL_LOCK is True: if mylar.DDL_LOCK is True:
logger.fdebug('[DDL] Another item is currently downloading via DDL. Only one item can be downloaded at a time using DDL. Patience.') logger.fdebug('[DDL] Another item is currently downloading via DDL. Only one item can be downloaded at a time using DDL. Patience.')
return return
@ -267,25 +272,47 @@ class GC(object):
filename = None filename = None
try: try:
with cfscrape.create_scraper() as s: with cfscrape.create_scraper() as s:
if resume is not None:
logger.info('[DDL-RESUME] Attempting to resume from: %s bytes' % resume)
self.headers['Range'] = 'bytes=%d-' % resume
cf_cookievalue, cf_user_agent = s.get_tokens(mainlink, headers=self.headers) cf_cookievalue, cf_user_agent = s.get_tokens(mainlink, headers=self.headers)
t = s.get(link, verify=True, cookies=cf_cookievalue, headers=self.headers, stream=True) t = s.get(link, verify=True, cookies=cf_cookievalue, headers=self.headers, stream=True)
filename = os.path.basename(urllib.unquote(t.url).decode('utf-8')) filename = os.path.basename(urllib.unquote(t.url).decode('utf-8'))
path = os.path.join(mylar.CONFIG.DDL_LOCATION, filename) try:
remote_filesize = int(t.headers['Content-length'])
logger.fdebug('remote filesize: %s' % remote_filesize)
except Exception as e:
logger.warn('[WARNING] Unable to retrieve remote file size. Error returned as : %s' % e)
remote_filesize = 0
mylar.DDL_LOCK = False
return ({"success": False,
"filename": filename,
"path": None})
else:
#write the filename to the db for tracking purposes...
myDB.upsert('ddl_info', {'filename': filename, 'remote_filesize': remote_filesize}, {'id': id})
#write the filename to the db for tracking purposes... path = os.path.join(mylar.CONFIG.DDL_LOCATION, filename)
myDB.upsert('ddl_info', {'filename': filename}, {'id': id})
if t.headers.get('content-encoding') == 'gzip': #.get('Content-Encoding') == 'gzip': if t.headers.get('content-encoding') == 'gzip': #.get('Content-Encoding') == 'gzip':
buf = StringIO(t.content) buf = StringIO(t.content)
f = gzip.GzipFile(fileobj=buf) f = gzip.GzipFile(fileobj=buf)
with open(path, 'wb') as f: if resume is not None:
for chunk in t.iter_content(chunk_size=1024): with open(path, 'ab') as f:
if chunk: # filter out keep-alive new chunks for chunk in t.iter_content(chunk_size=1024):
f.write(chunk) if chunk:
f.flush() f.write(chunk)
f.flush()
else:
with open(path, 'wb') as f:
for chunk in t.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
except Exception as e: except Exception as e:
logger.error('[ERROR] %s' % e) logger.error('[ERROR] %s' % e)

View File

@ -3045,14 +3045,18 @@ def ddl_downloader(queue):
#write this to the table so we have a record of what's going on. #write this to the table so we have a record of what's going on.
ctrlval = {'id': item['id']} ctrlval = {'id': item['id']}
val = {'status': 'Downloading'} val = {'status': 'Downloading',
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', val, ctrlval) myDB.upsert('ddl_info', val, ctrlval)
ddz = getcomics.GC() ddz = getcomics.GC()
ddzstat = ddz.downloadit(item['id'], item['link'], item['mainlink']) ddzstat = ddz.downloadit(item['id'], item['link'], item['mainlink'], item['resume'])
nval = {'status': 'Completed'} if ddzstat['success'] is True:
myDB.upsert('ddl_info', nval, ctrlval) tdnow = datetime.datetime.now()
nval = {'status': 'Completed',
'updated_date': tdnow.strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', nval, ctrlval)
if all([ddzstat['success'] is True, mylar.CONFIG.POST_PROCESSING is True]): if all([ddzstat['success'] is True, mylar.CONFIG.POST_PROCESSING is True]):
try: try:
@ -3076,10 +3080,13 @@ def ddl_downloader(queue):
'ddl': True}) 'ddl': True})
except Exception as e: except Exception as e:
logger.info('process error: %s [%s]' %(e, ddzstat)) logger.info('process error: %s [%s]' %(e, ddzstat))
elif mylar.CONFIG.POST_PROCESSING is True: elif all([ddzstat['success'] is True, mylar.CONFIG.POST_PROCESSING is False]):
logger.info('File successfully downloaded. Post Processing is not enabled - item retained here: %s' % os.path.join(ddzstat['path'],ddzstat['filename'])) logger.info('File successfully downloaded. Post Processing is not enabled - item retained here: %s' % os.path.join(ddzstat['path'],ddzstat['filename']))
else: else:
logger.info('[Status: %s] Failed to download: %s ' % (ddzstat['success'], ddzstat)) logger.info('[Status: %s] Failed to download: %s ' % (ddzstat['success'], ddzstat))
nval = {'status': 'Failed',
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', nval, ctrlval)
def postprocess_main(queue): def postprocess_main(queue):
while True: while True:

View File

@ -291,6 +291,15 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
else: else:
aliases = aliases aliases = aliases
logger.fdebug('comicIssues: %s' % comicIssues)
logger.fdebug('seriesyear: %s / currentyear: %s' % (SeriesYear, helpers.today()[:4]))
logger.fdebug('comicType: %s' % comic['Type'])
if all([int(comicIssues) == 1, SeriesYear < helpers.today()[:4], comic['Type'] != 'One-Shot', comic['Type'] != 'TPB']):
logger.info('Determined to be a one-shot issue. Forcing Edition to One-Shot')
booktype = 'One-Shot'
else:
booktype = comic['Type']
controlValueDict = {"ComicID": comicid} controlValueDict = {"ComicID": comicid}
newValueDict = {"ComicName": comic['ComicName'], newValueDict = {"ComicName": comic['ComicName'],
"ComicSortName": sortname, "ComicSortName": sortname,
@ -309,7 +318,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
"AlternateSearch": aliases, "AlternateSearch": aliases,
# "ComicPublished": gcdinfo['resultPublished'], # "ComicPublished": gcdinfo['resultPublished'],
"ComicPublished": "Unknown", "ComicPublished": "Unknown",
"Type": comic['Type'], "Type": booktype,
"Corrected_Type": comic['Corrected_Type'], "Corrected_Type": comic['Corrected_Type'],
"Collects": issue_list, "Collects": issue_list,
"DateAdded": helpers.today(), "DateAdded": helpers.today(),

View File

@ -41,7 +41,6 @@ class RTorrent(object):
mylar.CONFIG.RTORRENT_PASSWORD, mylar.CONFIG.RTORRENT_PASSWORD,
mylar.CONFIG.RTORRENT_AUTHENTICATION, mylar.CONFIG.RTORRENT_AUTHENTICATION,
mylar.CONFIG.RTORRENT_VERIFY, mylar.CONFIG.RTORRENT_VERIFY,
mylar.CONFIG.RTORRENT_SSL,
mylar.CONFIG.RTORRENT_RPC_URL, mylar.CONFIG.RTORRENT_RPC_URL,
mylar.CONFIG.RTORRENT_CA_BUNDLE): mylar.CONFIG.RTORRENT_CA_BUNDLE):
logger.error('[ERROR] Could not connect to %s - exiting' % mylar.CONFIG.RTORRENT_HOST) logger.error('[ERROR] Could not connect to %s - exiting' % mylar.CONFIG.RTORRENT_HOST)

View File

@ -23,23 +23,37 @@ class TorrentClient(object):
# Use default ssl verification # Use default ssl verification
return True return True
def connect(self, host, username, password, auth, verify, ssl, rpc_url, ca_bundle): def connect(self, host, username, password, auth, verify, rpc_url, ca_bundle, test=False):
if self.conn is not None: if self.conn is not None:
return self.conn return self.conn
if not host: if not host:
return False return {'status': False, 'error': 'No host specified'}
url = helpers.cleanHost(host, protocol = True, ssl = ssl) url = host
if host.startswith('https:'):
ssl = True
else:
if not host.startswith('http://'):
url = 'http://' + url
ssl = False
#add on the slash ..
if not url.endswith('/'):
url += '/'
#url = helpers.cleanHost(host, protocol = True, ssl = ssl)
# Automatically add '+https' to 'httprpc' protocol if SSL is enabled # Automatically add '+https' to 'httprpc' protocol if SSL is enabled
if ssl is True and url.startswith('httprpc://'): #if ssl is True and url.startswith('httprpc://'):
url = url.replace('httprpc://', 'httprpc+https://') # url = url.replace('httprpc://', 'httprpc+https://')
#if ssl is False and not url.startswith('http://'):
# url = 'http://' + url
parsed = urlparse(url) #parsed = urlparse(url)
# rpc_url is only used on http/https scgi pass-through # rpc_url is only used on http/https scgi pass-through
if parsed.scheme in ['http', 'https']: if rpc_url is not None:
url += rpc_url url += rpc_url
#logger.fdebug(url) #logger.fdebug(url)
@ -52,8 +66,8 @@ class TorrentClient(object):
verify_ssl=self.getVerifySsl(verify, ca_bundle) verify_ssl=self.getVerifySsl(verify, ca_bundle)
) )
except Exception as err: except Exception as err:
logger.error('Failed to connect to rTorrent: %s', err) logger.error('Make sure you have the right protocol specified for the rtorrent host. Failed to connect to rTorrent - error: %s.' % err)
return False return {'status': False, 'error': err}
else: else:
logger.fdebug('NO username %s / NO password %s' % (username, password)) logger.fdebug('NO username %s / NO password %s' % (username, password))
try: try:
@ -63,10 +77,13 @@ class TorrentClient(object):
verify_ssl=self.getVerifySsl(verify, ca_bundle) verify_ssl=self.getVerifySsl(verify, ca_bundle)
) )
except Exception as err: except Exception as err:
logger.error('Failed to connect to rTorrent: %s', err) logger.error('Failed to connect to rTorrent: %s' % err)
return False return {'status': False, 'error': err}
return self.conn if test is True:
return {'status': True, 'version': self.conn.get_client_version()}
else:
return self.conn
def find_torrent(self, hash): def find_torrent(self, hash):
return self.conn.find_torrent(hash) return self.conn.find_torrent(hash)

View File

@ -516,7 +516,7 @@ class WebInterface(object):
except Exception, e: except Exception, e:
logger.warn('Unable to download image from CV URL link - possibly no arc picture is present: %s' % imageurl) logger.warn('Unable to download image from CV URL link - possibly no arc picture is present: %s' % imageurl)
else: else:
logger.fdebug('comic image retrieval status code: ' + str(r.status_code)) logger.fdebug('comic image retrieval status code: %s' % r.status_code)
if str(r.status_code) != '200': if str(r.status_code) != '200':
logger.warn('Unable to download image from CV URL link: %s [Status Code returned: %s]' % (imageurl, r.status_code)) logger.warn('Unable to download image from CV URL link: %s [Status Code returned: %s]' % (imageurl, r.status_code))
@ -532,8 +532,8 @@ class WebInterface(object):
f.flush() f.flush()
arc_results = mylar.cv.getComic(comicid=None, type='issue', arcid=arcid, arclist=arclist) arc_results = mylar.cv.getComic(comicid=None, type='issue', arcid=arcid, arclist=arclist)
logger.fdebug(module + ' Arcresults: ' + str(arc_results)) logger.fdebug('%s Arcresults: %s' % (module, arc_results))
logger.fdebug('arclist: ' + str(arclist)) logger.fdebug('%s Arclist: %s' % (module, arclist))
if len(arc_results) > 0: if len(arc_results) > 0:
import random import random
@ -610,7 +610,7 @@ class WebInterface(object):
readingorder = int(re.sub('[\,\|]','', rosre).strip()) readingorder = int(re.sub('[\,\|]','', rosre).strip())
else: else:
readingorder = 0 readingorder = 0
logger.fdebug('[' + str(readingorder) + '] issueid:' + str(issid) + ' - findorder#:' + str(findorder)) logger.fdebug('[%s] issueid: %s - findorder#: %s' % (readingorder, issid, findorder))
issuedata.append({"ComicID": comicid, issuedata.append({"ComicID": comicid,
"IssueID": issid, "IssueID": issid,
@ -628,7 +628,7 @@ class WebInterface(object):
"Manual": manual_mod}) "Manual": manual_mod})
n+=1 n+=1
comicid_results = mylar.cv.getComic(comicid=None, type='comicyears', comicidlist=cidlist) comicid_results = mylar.cv.getComic(comicid=None, type='comicyears', comicidlist=cidlist)
logger.fdebug(module + ' Initiating issue updating - just the info') logger.fdebug('%s Initiating issue updating - just the info' % module)
for AD in issuedata: for AD in issuedata:
seriesYear = 'None' seriesYear = 'None'
@ -1335,9 +1335,9 @@ class WebInterface(object):
threading.Thread(target=self.queueissue, kwargs=kwargs).start() threading.Thread(target=self.queueissue, kwargs=kwargs).start()
queueit.exposed = True queueit.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None, pullweek=None, pullyear=None, manual=False, ComicVersion=None): def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None, pullweek=None, pullyear=None, manual=False, ComicVersion=None, BookType=None):
logger.fdebug('ComicID:' + str(ComicID)) logger.fdebug('ComicID: %s' % ComicID)
logger.fdebug('mode:' + str(mode)) logger.fdebug('mode: %s' % mode)
now = datetime.datetime.now() now = datetime.datetime.now()
myDB = db.DBConnection() myDB = db.DBConnection()
#mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist. #mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
@ -1359,8 +1359,8 @@ class WebInterface(object):
SARC = True SARC = True
IssueArcID = None IssueArcID = None
else: else:
logger.info(u"Story Arc : " + str(SARC) + " queueing selected issue...") logger.info('Story Arc : %s queueing selected issue...' % SARC)
logger.info(u"IssueArcID : " + str(IssueArcID)) logger.fdebug('IssueArcID : %s' % IssueArcID)
#try to load the issue dates - can now sideload issue details. #try to load the issue dates - can now sideload issue details.
dateload = myDB.selectone('SELECT * FROM storyarcs WHERE IssueArcID=?', [IssueArcID]).fetchone() dateload = myDB.selectone('SELECT * FROM storyarcs WHERE IssueArcID=?', [IssueArcID]).fetchone()
if dateload is None: if dateload is None:
@ -1373,25 +1373,24 @@ class WebInterface(object):
ReleaseDate = dateload['ReleaseDate'] ReleaseDate = dateload['ReleaseDate']
Publisher = dateload['IssuePublisher'] Publisher = dateload['IssuePublisher']
SeriesYear = dateload['SeriesYear'] SeriesYear = dateload['SeriesYear']
BookType = dateload['Type']
if ComicYear is None: ComicYear = SeriesYear if ComicYear is None: ComicYear = SeriesYear
if dateload['Volume'] is None: if dateload['Volume'] is None:
logger.info('Marking ' + ComicName + ' #' + ComicIssue + ' as wanted...') logger.info('Marking %s #%s as wanted...' % (ComicName, ComicIssue))
else: else:
logger.info('Marking ' + ComicName + ' (' + dateload['Volume'] + ') #' + ComicIssue + ' as wanted...') logger.info('Marking %s (%s) #%s as wanted...' % (ComicName, dateload['Volume'], ComicIssue))
logger.fdebug('publisher: ' + Publisher) logger.fdebug('publisher: %s' % Publisher)
controlValueDict = {"IssueArcID": IssueArcID} controlValueDict = {"IssueArcID": IssueArcID}
newStatus = {"Status": "Wanted"} newStatus = {"Status": "Wanted"}
myDB.upsert("storyarcs", newStatus, controlValueDict) myDB.upsert("storyarcs", newStatus, controlValueDict)
logger.info('[STORY-ARCS] Now Queuing %s (%s) #%s for search' % (ComicName, ComicYear, ComicIssue)) moduletype = '[STORY-ARCS]'
s = mylar.SEARCH_QUEUE.put({'issueid': IssueArcID, 'comicname': ComicName, 'seriesyear': ComicYear, 'comicid': ComicID, 'issuenumber': ComicIssue}) passinfo = {'issueid': IssueArcID,
#foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=ReleaseDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=dateload['Volume'], SARC=SARC, IssueArcID=IssueArcID) 'comicname': ComicName,
#if foundcom['status'] is True: 'seriesyear': SeriesYear,
# logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")") 'comicid': ComicID,
# controlValueDict = {"IssueArcID": IssueArcID} 'issuenumber': ComicIssue,
# newStatus = {"Status": "Snatched"} 'booktype': BookType}
#myDB.upsert("storyarcs", newStatus, controlValueDict)
return # foundcom
elif mode == 'pullwant': #and ComicID is None elif mode == 'pullwant': #and ComicID is None
#this is for marking individual comics from the pullist to be downloaded. #this is for marking individual comics from the pullist to be downloaded.
@ -1400,19 +1399,17 @@ class WebInterface(object):
#better to set both to some generic #, and then filter out later... #better to set both to some generic #, and then filter out later...
IssueDate = pullinfo IssueDate = pullinfo
try: try:
ComicYear = IssueDate[:4] SeriesYear = IssueDate[:4]
except: except:
ComicYear == now.year SeriesYear == now.year
if Publisher == 'COMICS': Publisher = None if Publisher == 'COMICS': Publisher = None
logger.info('Now Queuing %s %s for search' % (ComicName, ComicIssue)) moduletype = '[PULL-LIST]'
s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': ComicYear, 'comicid': ComicID, 'issuenumber': ComicIssue}) passinfo = {'issueid': IssueID,
#foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=IssueDate, IssueID=IssueID, ComicID=ComicID, AlternateSearch=None, mode=mode, UseFuzzy=None, ComicVersion=ComicVersion, allow_packs=False, manual=manual) 'comicname': ComicName,
if manual is True: 'seriesyear': SeriesYear,
return foundcom 'comicid': ComicID,
#if foundcom['status'] is True: 'issuenumber': ComicIssue,
#logger.info('[ONE-OFF MODE] Successfully Downloaded ' + ComicName + ' ' + ComicIssue) 'booktype': BookType}
#return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'], pullinfo={'weeknumber': pullweek, 'year': pullyear})
return
elif mode == 'want' or mode == 'want_ann' or manualsearch: elif mode == 'want' or mode == 'want_ann' or manualsearch:
cdname = myDB.selectone("SELECT * from comics where ComicID=?", [ComicID]).fetchone() cdname = myDB.selectone("SELECT * from comics where ComicID=?", [ComicID]).fetchone()
@ -1430,9 +1427,9 @@ class WebInterface(object):
newStatus = {"Status": "Wanted"} newStatus = {"Status": "Wanted"}
if mode == 'want': if mode == 'want':
if manualsearch: if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' issue: ' + ComicIssue) logger.info('Initiating manual search for %s issue: %s' % (ComicName, ComicIssue))
else: else:
logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...") logger.info('Marking %s issue: %s as wanted...' % (ComicName, ComicIssue))
myDB.upsert("issues", newStatus, controlValueDict) myDB.upsert("issues", newStatus, controlValueDict)
else: else:
annual_name = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? and IssueID=?", [ComicID, IssueID]).fetchone() annual_name = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? and IssueID=?", [ComicID, IssueID]).fetchone()
@ -1442,55 +1439,49 @@ class WebInterface(object):
ComicName = annual_name['ReleaseComicName'] ComicName = annual_name['ReleaseComicName']
if manualsearch: if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' : ' + ComicIssue) logger.info('Initiating manual search for %s : %s' % (ComicName, ComicIssue))
else: else:
logger.info(u"Marking " + ComicName + " : " + ComicIssue + " as wanted...") logger.info('Marking %s : %s as wanted...' % (ComicName, ComicIssue))
myDB.upsert("annuals", newStatus, controlValueDict) myDB.upsert("annuals", newStatus, controlValueDict)
#--- moduletype = '[WANTED-SEARCH]'
#this should be on it's own somewhere passinfo = {'issueid': IssueID,
#if IssueID is not None: 'comicname': ComicName,
# controlValueDict = {"IssueID": IssueID} 'seriesyear': SeriesYear,
# newStatus = {"Status": "Wanted"} 'comicid': ComicID,
# myDB.upsert("issues", newStatus, controlValueDict) 'issuenumber': ComicIssue,
#for future reference, the year should default to current year (.datetime) 'booktype': BookType}
if mode == 'want':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM issues WHERE IssueID=?", [IssueID]).fetchone()
elif mode == 'want_ann': if mode == 'want':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone() issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM issues WHERE IssueID=?", [IssueID]).fetchone()
if ComicYear == None: elif mode == 'want_ann':
ComicYear = str(issues['IssueDate'])[:4] issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
if issues['ReleaseDate'] is None or issues['ReleaseDate'] == '0000-00-00': if ComicYear == None:
logger.info('No Store Date found for given issue. This is probably due to not Refreshing the Series beforehand.') ComicYear = str(issues['IssueDate'])[:4]
logger.info('I Will assume IssueDate as Store Date, but you should probably Refresh the Series and try again if required.') if issues['ReleaseDate'] is None or issues['ReleaseDate'] == '0000-00-00':
storedate = issues['IssueDate'] logger.info('No Store Date found for given issue. This is probably due to not Refreshing the Series beforehand.')
else: logger.info('I Will assume IssueDate as Store Date, but you should probably Refresh the Series and try again if required.')
storedate = issues['ReleaseDate'] storedate = issues['IssueDate']
#miy = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [ComicID]).fetchone() else:
#SeriesYear = miy['ComicYear'] storedate = issues['ReleaseDate']
#AlternateSearch = miy['AlternateSearch']
#Publisher = miy['ComicPublisher']
#UseAFuzzy = miy['UseFuzzy']
#ComicVersion = miy['ComicVersion']
if BookType == 'TPB': if BookType == 'TPB':
logger.info('[%s] Now Queueing %s (%s) for search' % (BookType, ComicName, SeriesYear)) logger.info('%s[%s] Now Queueing %s (%s) for search' % (moduletype, BookType, ComicName, SeriesYear))
elif ComicIssue is None: elif ComicIssue is None:
logger.info('Now Queueing %s (%s) for search' % (ComicName, SeriesYear)) logger.info('%s Now Queueing %s (%s) for search' % (moduletype, ComicName, SeriesYear))
else: else:
logger.info('Now Queueing %s (%s) #%s for search' % (ComicName, SeriesYear, ComicIssue)) logger.info('%s Now Queueing %s (%s) #%s for search' % (moduletype, ComicName, SeriesYear, ComicIssue))
s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': SeriesYear, 'comicid': ComicID, 'issuenumber': ComicIssue, 'booktype': BookType})
# foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, Publisher, issues['IssueDate'], storedate, IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID, manualsearch=manualsearch, filesafe=ComicName_Filesafe, allow_packs=AllowPacks, torrentid_32p=TorrentID_32p) #s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': SeriesYear, 'comicid': ComicID, 'issuenumber': ComicIssue, 'booktype': BookType})
# if foundcom['status'] is True: s = mylar.SEARCH_QUEUE.put(passinfo)
# # file check to see if issue exists and update 'have' count
# if IssueID is not None:
# logger.info("passing to updater.")
# return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'])
if manualsearch: if manualsearch:
# if it's a manual search, return to null here so the thread will die and not cause http redirect errors. # if it's a manual search, return to null here so the thread will die and not cause http redirect errors.
return return
if ComicID: if ComicID:
return cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) return cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
else: else:
raise cherrypy.HTTPRedirect(redirect) return
#raise cherrypy.HTTPRedirect(redirect)
queueissue.exposed = True queueissue.exposed = True
def unqueueissue(self, IssueID, ComicID, ComicName=None, Issue=None, FutureID=None, mode=None, ReleaseComicID=None): def unqueueissue(self, IssueID, ComicID, ComicName=None, Issue=None, FutureID=None, mode=None, ReleaseComicID=None):
@ -2198,6 +2189,25 @@ class WebInterface(object):
annualDelete.exposed = True annualDelete.exposed = True
def ddl_requeue(self, id, mode):
myDB = db.DBConnection()
item = myDB.selectone("SELECT * FROM DDL_INFO WHERE ID=?", [id]).fetchone()
if item is not None:
if mode == 'resume':
if item['status'] != 'Completed':
filesize = os.stat(os.path.join(mylar.CONFIG.DDL_LOCATION, item['filename'])).st_size
mylar.DDL_QUEUE.put({'link': item['link'],
'mainlink': item['mainlink'],
'series': item['series'],
'year': item['year'],
'size': item['size'],
'comicid': item['comicid'],
'issueid': item['issueid'],
'id': item['id'],
'resume': filesize})
ddl_requeue.exposed = True
def queueManage(self): # **args): def queueManage(self): # **args):
myDB = db.DBConnection() myDB = db.DBConnection()
activelist = 'There are currently no items currently downloading via Direct Download (DDL).' activelist = 'There are currently no items currently downloading via Direct Download (DDL).'
@ -2211,22 +2221,28 @@ class WebInterface(object):
'id': active['id']} 'id': active['id']}
resultlist = 'There are currently no items waiting in the Direct Download (DDL) Queue for processing.' resultlist = 'There are currently no items waiting in the Direct Download (DDL) Queue for processing.'
s_info = myDB.select("SELECT a.ComicName, a.ComicVersion, a.ComicID, a.ComicYear, b.Issue_Number, b.IssueID, c.size, c.status, c.id FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID INNER JOIN ddl_info as c ON b.IssueID = c.IssueID WHERE c.status != 'Downloading'") s_info = myDB.select("SELECT a.ComicName, a.ComicVersion, a.ComicID, a.ComicYear, b.Issue_Number, b.IssueID, c.size, c.status, c.id, c.updated_date FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID INNER JOIN ddl_info as c ON b.IssueID = c.IssueID WHERE c.status != 'Downloading'")
if s_info: if s_info:
resultlist = [] resultlist = []
for si in s_info: for si in s_info:
issue = si['Issue_Number'] issue = si['Issue_Number']
if issue is not None: if issue is not None:
issue = '#%s' % issue issue = '#%s' % issue
resultlist.append({'series': si['ComicName'], if si['status'] == 'Completed':
'issue': issue, si_status = '100%'
'id': si['id'], else:
'volume': si['ComicVersion'], si_status = ''
'year': si['ComicYear'], resultlist.append({'series': si['ComicName'],
'size': si['size'].strip(), 'issue': issue,
'comicid': si['ComicID'], 'id': si['id'],
'issueid': si['IssueID'], 'volume': si['ComicVersion'],
'status': si['status']}) 'year': si['ComicYear'],
'size': si['size'].strip(),
'comicid': si['ComicID'],
'issueid': si['IssueID'],
'status': si['status'],
'updated_date': si['updated_date'],
'progress': si_status})
logger.info('resultlist: %s' % resultlist) logger.info('resultlist: %s' % resultlist)
return serve_template(templatename="queue_management.html", title="Queue Management", activelist=activelist, resultlist=resultlist) return serve_template(templatename="queue_management.html", title="Queue Management", activelist=activelist, resultlist=resultlist)
@ -2764,6 +2780,12 @@ class WebInterface(object):
return serve_template(templatename="readinglist.html", title="Reading Lists", issuelist=readlist, counts=counts) return serve_template(templatename="readinglist.html", title="Reading Lists", issuelist=readlist, counts=counts)
readlist.exposed = True readlist.exposed = True
def clear_arcstatus(self, issuearcid=None):
myDB = db.DBConnection()
myDB.upsert('storyarcs', {'Status': 'Skipped'}, {'IssueArcID': issuearcid})
logger.info('Status set to Skipped.')
clear_arcstatus.exposed = True
def storyarc_main(self, arcid=None): def storyarc_main(self, arcid=None):
myDB = db.DBConnection() myDB = db.DBConnection()
arclist = [] arclist = []
@ -2843,7 +2865,7 @@ class WebInterface(object):
elif lowyear == maxyear: elif lowyear == maxyear:
spanyears = str(maxyear) spanyears = str(maxyear)
else: else:
spanyears = str(lowyear) + ' - ' + str(maxyear) spanyears = '%s - %s' % (lowyear, maxyear)
sdir = helpers.arcformat(arcinfo[0]['StoryArc'], spanyears, arcpub) sdir = helpers.arcformat(arcinfo[0]['StoryArc'], spanyears, arcpub)
@ -3311,18 +3333,18 @@ class WebInterface(object):
elif lowyear == maxyear: elif lowyear == maxyear:
spanyears = str(maxyear) spanyears = str(maxyear)
else: else:
spanyears = str(lowyear) + ' - ' + str(maxyear) spanyears = '%s - %s' % (lowyear, maxyear)
logger.info('arcpub: ' + arcpub) logger.info('arcpub: %s' % arcpub)
dstloc = helpers.arcformat(arcdir, spanyears, arcpub) dstloc = helpers.arcformat(arcdir, spanyears, arcpub)
filelist = None filelist = None
if dstloc is not None: if dstloc is not None:
if not os.path.isdir(dstloc): if not os.path.isdir(dstloc):
if mylar.CONFIG.STORYARCDIR: if mylar.CONFIG.STORYARCDIR:
logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.') logger.info('Story Arc Directory [%s] does not exist! - attempting to create now.' % dstloc)
else: else:
logger.info('Story Arc Grab-Bag Directory [' + dstloc + '] does not exist! - attempting to create now.') logger.info('Story Arc Grab-Bag Directory [%s] does not exist! - attempting to create now.' % dstloc)
checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True) checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
if not checkdirectory: if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.') logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
@ -3346,7 +3368,7 @@ class WebInterface(object):
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True) fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles() filechk = fchk.listFiles()
fccnt = filechk['comiccount'] fccnt = filechk['comiccount']
logger.fdebug('[STORY ARC DIRECTORY] ' + str(fccnt) + ' files exist within this directory.') logger.fdebug('[STORY ARC DIRECTORY] %s files exist within this directory.' % fccnt)
if fccnt > 0: if fccnt > 0:
filelist = filechk['comiclist'] filelist = filechk['comiclist']
logger.info(filechk) logger.info(filechk)
@ -3357,11 +3379,14 @@ class WebInterface(object):
sarc_title = None sarc_title = None
showonreadlist = 1 # 0 won't show storyarcissues on storyarcs main page, 1 will show showonreadlist = 1 # 0 won't show storyarcissues on storyarcs main page, 1 will show
for arc in ArcWatch: for arc in ArcWatch:
newStatus = 'Skipped'
if arc['Manual'] == 'deleted': if arc['Manual'] == 'deleted':
continue continue
sarc_title = arc['StoryArc'] sarc_title = arc['StoryArc']
logger.fdebug('[' + arc['StoryArc'] + '] ' + arc['ComicName'] + ' : ' + arc['IssueNumber']) logger.fdebug('[%s] %s : %s' % (arc['StoryArc'], arc['ComicName'], arc['IssueNumber']))
matcheroso = "no" matcheroso = "no"
#fc = filechecker.FileChecker(watchcomic=arc['ComicName']) #fc = filechecker.FileChecker(watchcomic=arc['ComicName'])
@ -3379,29 +3404,45 @@ class WebInterface(object):
# if it's a multi-volume series, it's decimalized - let's get rid of the decimal. # if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
GCDissue, whocares = helpers.decimal_issue(arc['IssueNumber']) GCDissue, whocares = helpers.decimal_issue(arc['IssueNumber'])
GCDissue = int(GCDissue) / 1000 GCDissue = int(GCDissue) / 1000
if '.' not in str(GCDissue): GCDissue = str(GCDissue) + ".00" if '.' not in str(GCDissue):
logger.fdebug("issue converted to " + str(GCDissue)) GCDissue = '%s.00' % GCDissue
logger.fdebug("issue converted to %s" % GCDissue)
isschk = myDB.selectone("SELECT * FROM issues WHERE Issue_Number=? AND ComicID=?", [str(GCDissue), comic['ComicID']]).fetchone() isschk = myDB.selectone("SELECT * FROM issues WHERE Issue_Number=? AND ComicID=?", [str(GCDissue), comic['ComicID']]).fetchone()
else: else:
issue_int = helpers.issuedigits(arc['IssueNumber']) issue_int = helpers.issuedigits(arc['IssueNumber'])
logger.fdebug('int_issue = ' + str(issue_int)) logger.fdebug('int_issue = %s' % issue_int)
isschk = myDB.selectone("SELECT * FROM issues WHERE Int_IssueNumber=? AND ComicID=?", [issue_int, comic['ComicID']]).fetchone() #AND STATUS !='Snatched'", [issue_int, comic['ComicID']]).fetchone() isschk = myDB.selectone("SELECT * FROM issues WHERE Int_IssueNumber=? AND ComicID=?", [issue_int, comic['ComicID']]).fetchone() #AND STATUS !='Snatched'", [issue_int, comic['ComicID']]).fetchone()
if isschk is None: if isschk is None:
logger.fdebug("we matched on name, but issue " + arc['IssueNumber'] + " doesn't exist for " + comic['ComicName']) logger.fdebug('We matched on name, but issue %s doesn\'t exist for %s' % (arc['IssueNumber'], comic['ComicName']))
else: else:
#this gets ugly - if the name matches and the issue, it could still be wrong series #this gets ugly - if the name matches and the issue, it could still be wrong series
#use series year to break it down further. #use series year to break it down further.
logger.fdebug('COMIC-comicyear: ' + str(int(comic['ComicYear']))) logger.fdebug('COMIC-comicyear: %s' % comic['ComicYear'])
logger.fdebug('ARC-seriesyear: ' + str(int(arc['SeriesYear']))) logger.fdebug('B4-ARC-seriesyear: %s' % arc['SeriesYear'])
if int(comic['ComicYear']) != int(arc['SeriesYear']): if any([arc['SeriesYear'] is None, arc['SeriesYear'] == 'None']):
logger.fdebug("Series years are different - discarding match. " + str(comic['ComicYear']) + " != " + str(arc['SeriesYear'])) vy = '2099-00-00'
for x in isschk:
if any([x['IssueDate'] is None, x['IssueDate'] == '0000-00-00']):
sy = x['StoreDate']
if any([sy is None, sy == '0000-00-00']):
continue
else:
sy = x['IssueDate']
if sy < vy:
v_seriesyear = sy
seriesyear = v_seriesyear
logger.info('No Series year set. Discovered & set to %s' % seriesyear)
else: else:
logger.fdebug("issue #: %s is present!" % arc['IssueNumber']) seriesyear = arc['SeriesYear']
logger.fdebug('isschk: %s' % isschk) logger.fdebug('ARC-seriesyear: %s' % seriesyear)
logger.fdebug("Comicname: " + arc['ComicName']) if int(comic['ComicYear']) != int(seriesyear):
logger.fdebug("ComicID: " + str(isschk['ComicID'])) logger.fdebug('Series years are different - discarding match. %s != %s' % (comic['ComicYear'], seriesyear))
logger.fdebug("Issue: %s" % arc['IssueNumber']) else:
logger.fdebug("IssueArcID: " + str(arc['IssueArcID'])) logger.fdebug('issue #: %s is present!' % arc['IssueNumber'])
logger.fdebug('Comicname: %s' % arc['ComicName'])
logger.fdebug('ComicID: %s' % isschk['ComicID'])
logger.fdebug('Issue: %s' % arc['IssueNumber'])
logger.fdebug('IssueArcID: %s' % arc['IssueArcID'])
#gather the matches now. #gather the matches now.
arc_match.append({ arc_match.append({
"match_storyarc": arc['StoryArc'], "match_storyarc": arc['StoryArc'],
@ -3416,17 +3457,17 @@ class WebInterface(object):
matcheroso = "yes" matcheroso = "yes"
break break
if matcheroso == "no": if matcheroso == "no":
logger.fdebug("[NO WATCHLIST MATCH] Unable to find a match for " + arc['ComicName'] + " :#" + arc['IssueNumber']) logger.fdebug('[NO WATCHLIST MATCH] Unable to find a match for %s :#%s' % (arc['ComicName'], arc['IssueNumber']))
wantedlist.append({ wantedlist.append({
"ComicName": arc['ComicName'], "ComicName": arc['ComicName'],
"IssueNumber": arc['IssueNumber'], "IssueNumber": arc['IssueNumber'],
"IssueYear": arc['IssueYear']}) "IssueYear": arc['IssueYear']})
if filelist is not None and mylar.CONFIG.STORYARCDIR: if filelist is not None and mylar.CONFIG.STORYARCDIR:
logger.fdebug("[NO WATCHLIST MATCH] Checking against lcoal Arc directory for given issue.") logger.fdebug('[NO WATCHLIST MATCH] Checking against local Arc directory for given issue.')
fn = 0 fn = 0
valids = [x for x in filelist if re.sub('[\|\s]','', x['dynamic_name'].lower()).strip() == re.sub('[\|\s]','', arc['DynamicComicName'].lower()).strip()] valids = [x for x in filelist if re.sub('[\|\s]','', x['dynamic_name'].lower()).strip() == re.sub('[\|\s]','', arc['DynamicComicName'].lower()).strip()]
logger.info('valids: ' + str(valids)) logger.fdebug('valids: %s' % valids)
if len(valids) > 0: if len(valids) > 0:
for tmpfc in valids: #filelist: for tmpfc in valids: #filelist:
haveissue = "no" haveissue = "no"
@ -3435,19 +3476,19 @@ class WebInterface(object):
fcdigit = helpers.issuedigits(arc['IssueNumber']) fcdigit = helpers.issuedigits(arc['IssueNumber'])
int_iss = helpers.issuedigits(temploc) int_iss = helpers.issuedigits(temploc)
if int_iss == fcdigit: if int_iss == fcdigit:
logger.fdebug(arc['ComicName'] + ' Issue #' + arc['IssueNumber'] + ' already present in StoryArc directory.') logger.fdebug('%s Issue #%s already present in StoryArc directory' % (arc['ComicName'], arc['IssueNumber']))
#update storyarcs db to reflect status. #update storyarcs db to reflect status.
rr_rename = False rr_rename = False
if mylar.CONFIG.READ2FILENAME: if mylar.CONFIG.READ2FILENAME:
readorder = helpers.renamefile_readingorder(arc['ReadingOrder']) readorder = helpers.renamefile_readingorder(arc['ReadingOrder'])
if all([tmpfc['reading_order'] is not None, int(readorder) != int(tmpfc['reading_order']['reading_sequence'])]): if all([tmpfc['reading_order'] is not None, int(readorder) != int(tmpfc['reading_order']['reading_sequence'])]):
logger.warn('reading order sequence has changed for this issue from ' + str(tmpfc['reading_order']['reading_sequence']) + ' to ' + str(readorder)) logger.warn('reading order sequence has changed for this issue from %s to %s' % (tmpfc['reading_order']['reading_sequence'], readorder))
rr_rename = True rr_rename = True
dfilename = str(readorder) + '-' + tmpfc['reading_order']['filename'] dfilename = '%s-%s' % (readorder, tmpfc['reading_order']['filename'])
elif tmpfc['reading_order'] is None: elif tmpfc['reading_order'] is None:
dfilename = str(readorder) + '-' + tmpfc['comicfilename'] dfilename = '%s-%s' % (readorder, tmpfc['comicfilename'])
else: else:
dfilename = str(readorder) + '-' + tmpfc['reading_order']['filename'] dfilename = '%s-%s' % (readorder, tmpfc['reading_order']['filename'])
else: else:
dfilename = tmpfc['comicfilename'] dfilename = tmpfc['comicfilename']
@ -3457,21 +3498,30 @@ class WebInterface(object):
loc_path = os.path.join(tmpfc['comiclocation'], dfilename) loc_path = os.path.join(tmpfc['comiclocation'], dfilename)
if rr_rename: if rr_rename:
logger.fdebug('Now re-sequencing file to : ' + dfilename) logger.fdebug('Now re-sequencing file to : %s' % dfilename)
os.rename(os.path.join(tmpfc['comiclocation'],tmpfc['comicfilename']), loc_path) os.rename(os.path.join(tmpfc['comiclocation'],tmpfc['comicfilename']), loc_path)
newVal = {"Status": "Downloaded", newStatus = 'Downloaded'
newVal = {"Status": newStatus,
"Location": loc_path} #dfilename} "Location": loc_path} #dfilename}
ctrlVal = {"IssueArcID": arc['IssueArcID']} ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal) myDB.upsert("storyarcs", newVal, ctrlVal)
break
else:
newStatus = 'Skipped'
fn+=1 fn+=1
if newStatus == 'Skipped':
#this will set all None Status' to Skipped (at least initially)
newVal = {"Status": "Skipped"}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal)
continue continue
newVal = {"Status": "Skipped"} newVal = {"Status": "Skipped"}
ctrlVal = {"IssueArcID": arc['IssueArcID']} ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal) myDB.upsert("storyarcs", newVal, ctrlVal)
logger.fdebug(str(len(arc_match)) + " issues currently exist on your watchlist that are within this arc. Analyzing...") logger.fdebug('%s issues currently exist on your watchlist that are within this arc. Analyzing...' % len(arc_match))
for m_arc in arc_match: for m_arc in arc_match:
#now we cycle through the issues looking for a match. #now we cycle through the issues looking for a match.
#issue = myDB.selectone("SELECT * FROM issues where ComicID=? and Issue_Number=?", [m_arc['match_id'], m_arc['match_issue']]).fetchone() #issue = myDB.selectone("SELECT * FROM issues where ComicID=? and Issue_Number=?", [m_arc['match_id'], m_arc['match_issue']]).fetchone()
@ -3479,11 +3529,9 @@ class WebInterface(object):
if issue is None: pass if issue is None: pass
else: else:
logger.fdebug('issue: %s ... %s' % (issue['Issue_Number'], m_arc['match_issue']))
logger.fdebug("issue: " + issue['Issue_Number'] + "..." + m_arc['match_issue'])
# if helpers.decimal_issue(issuechk['Issue_Number']) == helpers.decimal_issue(m_arc['match_issue']):
if issue['Issue_Number'] == m_arc['match_issue']: if issue['Issue_Number'] == m_arc['match_issue']:
logger.fdebug("we matched on " + issue['Issue_Number'] + " for " + m_arc['match_name']) logger.fdebug('We matched on %s for %s' % (issue['Issue_Number'], m_arc['match_name']))
if issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived' or issue['Status'] == 'Snatched': if issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived' or issue['Status'] == 'Snatched':
if showonreadlist: if showonreadlist:
showctrlVal = {"IssueID": issue['IssueID']} showctrlVal = {"IssueID": issue['IssueID']}
@ -3494,7 +3542,7 @@ class WebInterface(object):
"ComicID": m_arc['match_id']} "ComicID": m_arc['match_id']}
myDB.upsert("readlist", shownewVal, showctrlVal) myDB.upsert("readlist", shownewVal, showctrlVal)
logger.fdebug("Already have " + issue['ComicName'] + " :# " + issue['Issue_Number']) logger.fdebug('Already have %s : #%s' % (issue['ComicName'], issue['Issue_Number']))
if issue['Location'] is not None: if issue['Location'] is not None:
issloc = os.path.join(m_arc['match_filedirectory'], issue['Location']) issloc = os.path.join(m_arc['match_filedirectory'], issue['Location'])
else: else:
@ -3512,10 +3560,10 @@ class WebInterface(object):
continue continue
except: except:
pass pass
logger.fdebug('source location set to : ' + issloc) logger.fdebug('source location set to : %s' % issloc)
if all([mylar.CONFIG.STORYARCDIR, mylar.CONFIG.COPY2ARCDIR]): if all([mylar.CONFIG.STORYARCDIR, mylar.CONFIG.COPY2ARCDIR]):
logger.fdebug('Destination location set to : ' + m_arc['destination_location']) logger.fdebug('Destination location set to : %s' % m_arc['destination_location'])
logger.fdebug('Attempting to copy into StoryArc directory') logger.fdebug('Attempting to copy into StoryArc directory')
#copy into StoryArc directory... #copy into StoryArc directory...
@ -3525,13 +3573,13 @@ class WebInterface(object):
if mylar.CONFIG.READ2FILENAME: if mylar.CONFIG.READ2FILENAME:
readorder = helpers.renamefile_readingorder(m_arc['match_readingorder']) readorder = helpers.renamefile_readingorder(m_arc['match_readingorder'])
if all([m_arc['match_readingorder'] is not None, int(readorder) != int(m_arc['match_readingorder'])]): if all([m_arc['match_readingorder'] is not None, int(readorder) != int(m_arc['match_readingorder'])]):
logger.warn('reading order sequence has changed for this issue from ' + str(m_arc['match_reading_order']) + ' to ' + str(readorder)) logger.warn('Reading order sequence has changed for this issue from %s to %s' % (m_arc['match_reading_order'], readorder))
rr_rename = True rr_rename = True
dfilename = str(readorder) + '-' + issue['Location'] dfilename = '%s-%s' % (readorder, issue['Location'])
elif m_arc['match_readingorder'] is None: elif m_arc['match_readingorder'] is None:
dfilename = str(readorder) + '-' + issue['Location'] dfilename = '%s-%s' % (readorder, issue['Location'])
else: else:
dfilename = str(readorder) + '-' + issue['Location'] dfilename = '%s-%s' % (readorder, issue['Location'])
else: else:
dfilename = issue['Location'] dfilename = issue['Location']
@ -3542,21 +3590,21 @@ class WebInterface(object):
dstloc = os.path.join(m_arc['destination_location'], dfilename) dstloc = os.path.join(m_arc['destination_location'], dfilename)
if rr_rename: if rr_rename:
logger.fdebug('Now re-sequencing COPIED file to : ' + dfilename) logger.fdebug('Now re-sequencing COPIED file to : %s' % dfilename)
os.rename(issloc, dstloc) os.rename(issloc, dstloc)
if not os.path.isfile(dstloc): if not os.path.isfile(dstloc):
logger.fdebug('Copying ' + issloc + ' to ' + dstloc) logger.fdebug('Copying %s to %s' % (issloc, dstloc))
try: try:
fileoperation = helpers.file_ops(issloc, dstloc, arc=True) fileoperation = helpers.file_ops(issloc, dstloc, arc=True)
if not fileoperation: if not fileoperation:
raise OSError raise OSError
except (OSError, IOError): except (OSError, IOError):
logger.error('Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + issloc + ' - check directories and manually re-run.') logger.error('Failed to %s %s - check directories and manually re-run.' % (mylar.CONFIG.FILE_OPTS, issloc))
continue continue
else: else:
logger.fdebug('Destination file exists: ' + dstloc) logger.fdebug('Destination file exists: %s' % dstloc)
location_path = dstloc location_path = dstloc
else: else:
location_path = issloc location_path = issloc
@ -3569,12 +3617,12 @@ class WebInterface(object):
myDB.upsert("storyarcs",newVal,ctrlVal) myDB.upsert("storyarcs",newVal,ctrlVal)
else: else:
logger.fdebug("We don't have " + issue['ComicName'] + " :# " + issue['Issue_Number']) logger.fdebug('We don\'t have %s : #%s' % (issue['ComicName'], issue['Issue_Number']))
ctrlVal = {"IssueArcID": m_arc['match_issuearcid']} ctrlVal = {"IssueArcID": m_arc['match_issuearcid']}
newVal = {"Status": issue['Status'], #"Wanted", newVal = {"Status": issue['Status'], #"Wanted",
"IssueID": issue['IssueID']} "IssueID": issue['IssueID']}
myDB.upsert("storyarcs", newVal, ctrlVal) myDB.upsert("storyarcs", newVal, ctrlVal)
logger.info("Marked " + issue['ComicName'] + " :# " + issue['Issue_Number'] + " as " + issue['Status']) logger.info('Marked %s :#%s as %s' % (issue['ComicName'], issue['Issue_Number'], issue['Status']))
arcstats = self.storyarc_main(StoryArcID) arcstats = self.storyarc_main(StoryArcID)
logger.info('[STORY-ARCS] Completed Missing/Recheck Files for %s [%s / %s]' % (arcname, arcstats['Have'], arcstats['TotalIssues'])) logger.info('[STORY-ARCS] Completed Missing/Recheck Files for %s [%s / %s]' % (arcname, arcstats['Have'], arcstats['TotalIssues']))
@ -3588,7 +3636,6 @@ class WebInterface(object):
def ReadGetWanted(self, StoryArcID): def ReadGetWanted(self, StoryArcID):
# this will queue up (ie. make 'Wanted') issues in a given Story Arc that are 'Not Watched' # this will queue up (ie. make 'Wanted') issues in a given Story Arc that are 'Not Watched'
print StoryArcID
stupdate = [] stupdate = []
mode = 'story_arc' mode = 'story_arc'
myDB = db.DBConnection() myDB = db.DBConnection()
@ -3596,25 +3643,25 @@ class WebInterface(object):
if wantedlist is not None: if wantedlist is not None:
for want in wantedlist: for want in wantedlist:
print want print want
issuechk = myDB.selectone("SELECT * FROM issues WHERE IssueID=?", [want['IssueArcID']]).fetchone() issuechk = myDB.selectone("SELECT a.Type, a.ComicYear, b.ComicName, b.Issue_Number, b.ComicID, b.IssueID FROM comics as a INNER JOIN issues as b on a.ComicID = b.ComicID WHERE b.IssueID=?", [want['IssueArcID']]).fetchone()
SARC = want['StoryArc'] SARC = want['StoryArc']
IssueArcID = want['IssueArcID'] IssueArcID = want['IssueArcID']
Publisher = want['Publisher'] Publisher = want['Publisher']
if issuechk is None: if issuechk is None:
# none means it's not a 'watched' series # none means it's not a 'watched' series
s_comicid = want['ComicID'] #None s_comicid = want['ComicID'] #None
s_issueid = want['IssueID'] #None s_issueid = want['IssueArcID'] #None
BookType = want['Type']
stdate = want['ReleaseDate'] stdate = want['ReleaseDate']
issdate = want['IssueDate'] issdate = want['IssueDate']
logger.fdebug("-- NOT a watched series queue.") logger.fdebug("-- NOT a watched series queue.")
logger.fdebug(want['ComicName'] + " -- #" + str(want['IssueNumber'])) logger.fdebug('%s -- #%s' % (want['ComicName'], want['IssueNumber']))
logger.fdebug(u"Story Arc : " + str(SARC) + " queueing the selected issue...") logger.fdebug('Story Arc %s : queueing the selected issue...' % SARC)
logger.fdebug(u"IssueArcID : " + str(IssueArcID)) logger.fdebug('IssueArcID : %s' % IssueArcID)
logger.fdebug(u"ComicID: " + str(s_comicid) + " --- IssueID: " + str(s_issueid)) # no comicid in issues table. logger.fdebug('ComicID: %s --- IssueID: %s' % (s_comicid, s_issueid)) # no comicid in issues table.
logger.fdebug(u"ReleaseDate: " + str(stdate) + " --- IssueDate: " + str(issdate)) logger.fdebug('ReleaseDate: %s --- IssueDate: %s' % (stdate, issdate))
#logger.info(u'Publisher: ' + want['Publisher']) <-- no publisher in issues table.
issueyear = want['IssueYEAR'] issueyear = want['IssueYEAR']
logger.fdebug('IssueYear: ' + str(issueyear)) logger.fdebug('IssueYear: %s' % issueyear)
if issueyear is None or issueyear == 'None': if issueyear is None or issueyear == 'None':
try: try:
logger.fdebug('issdate:' + str(issdate)) logger.fdebug('issdate:' + str(issdate))
@ -3624,31 +3671,44 @@ class WebInterface(object):
except: except:
issueyear = stdate[:4] issueyear = stdate[:4]
logger.fdebug('ComicYear: ' + str(want['SeriesYear'])) logger.fdebug('ComicYear: %s' % want['SeriesYear'])
foundcom, prov = search.search_init(ComicName=want['ComicName'], IssueNumber=want['IssueNumber'], ComicYear=issueyear, SeriesYear=want['SeriesYear'], Publisher=Publisher, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True) passinfo = {'issueid': s_issueid,
'comicname': want['ComicName'],
'seriesyear': want['SeriesYear'],
'comicid': s_comicid,
'issuenumber': want['IssueNumber'],
'booktype': BookType}
#oneoff = True ?
else: else:
# it's a watched series # it's a watched series
s_comicid = issuechk['ComicID'] s_comicid = issuechk['ComicID']
s_issueid = issuechk['IssueID'] s_issueid = issuechk['IssueID']
logger.fdebug("-- watched series queue.") logger.fdebug("-- watched series queue.")
logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number'])) logger.fdebug('%s --- #%s' % (issuechk['ComicName'], issuechk['Issue_Number']))
foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=Publisher, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID) passinfo = {'issueid': s_issueid,
'comicname': issuechk['ComicName'],
'seriesyear': issuechk['SeriesYear'],
'comicid': s_comicid,
'issuenumber': issuechk['Issue_Number'],
'booktype': issuechk['Type']}
if foundcom['status'] is True: mylar.SEARCH_QUEUE.put(passinfo)
logger.fdebug('sucessfully found.')
#update the status - this is necessary for torrents as they are in 'snatched' status. #if foundcom['status'] is True:
updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID) # logger.fdebug('sucessfully found.')
else: # #update the status - this is necessary for torrents as they are in 'snatched' status.
logger.fdebug('not sucessfully found.') # updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
stupdate.append({"Status": "Wanted", #else:
"IssueArcID": IssueArcID, # logger.fdebug('not sucessfully found.')
"IssueID": s_issueid}) # stupdate.append({"Status": "Wanted",
# "IssueArcID": IssueArcID,
# "IssueID": s_issueid})
watchlistchk = myDB.select("SELECT * FROM storyarcs WHERE StoryArcID=? AND Status='Wanted'", [StoryArcID]) watchlistchk = myDB.select("SELECT * FROM storyarcs WHERE StoryArcID=? AND Status='Wanted'", [StoryArcID])
if watchlistchk is not None: if watchlistchk is not None:
for watchchk in watchlistchk: for watchchk in watchlistchk:
logger.fdebug('Watchlist hit - ' + str(watchchk['ComicName'])) logger.fdebug('Watchlist hit - %s' % watchchk['ComicName'])
issuechk = myDB.selectone("SELECT * FROM issues WHERE IssueID=?", [watchchk['IssueArcID']]).fetchone() issuechk = myDB.selectone("SELECT a.Type, a.ComicYear, b.ComicName, b.Issue_Number, b.ComicID, b.IssueID FROM comics as a INNER JOIN issues as b on a.ComicID = b.ComicID WHERE b.IssueID=?", [watchchk['IssueArcID']]).fetchone()
SARC = watchchk['StoryArc'] SARC = watchchk['StoryArc']
IssueArcID = watchchk['IssueArcID'] IssueArcID = watchchk['IssueArcID']
if issuechk is None: if issuechk is None:
@ -3659,17 +3719,17 @@ class WebInterface(object):
s_comicid = None s_comicid = None
try: try:
s_issueid = watchchk['IssueID'] s_issueid = watchchk['IssueArcID']
except: except:
s_issueid = None s_issueid = None
logger.fdebug("-- NOT a watched series queue.") logger.fdebug("-- NOT a watched series queue.")
logger.fdebug(watchchk['ComicName'] + " -- #" + str(watchchk['IssueNumber'])) logger.fdebug('%s -- #%s' % (watchchk['ComicName'], watchchk['IssueNumber']))
logger.fdebug(u"Story Arc : " + str(SARC) + " queueing up the selected issue...") logger.fdebug('Story Arc : %s queueing up the selected issue...' % SARC)
logger.fdebug(u"IssueArcID : " + str(IssueArcID)) logger.fdebug('IssueArcID : %s' % IssueArcID)
try: try:
issueyear = watchchk['IssueYEAR'] issueyear = watchchk['IssueYEAR']
logger.fdebug('issueYEAR : ' + issueyear) logger.fdebug('issueYEAR : %s' % issueyear)
except: except:
try: try:
issueyear = watchchk['IssueDate'][:4] issueyear = watchchk['IssueDate'][:4]
@ -3678,39 +3738,55 @@ class WebInterface(object):
stdate = watchchk['ReleaseDate'] stdate = watchchk['ReleaseDate']
issdate = watchchk['IssueDate'] issdate = watchchk['IssueDate']
logger.fdebug('issueyear : ' + str(issueyear)) logger.fdebug('issueyear : %s' % issueyear)
logger.fdebug('comicname : ' + watchchk['ComicName']) logger.fdebug('comicname : %s' % watchchk['ComicName'])
logger.fdebug('issuenumber : ' + watchchk['IssueNumber']) logger.fdebug('issuenumber : %s' % watchchk['IssueNumber'])
logger.fdebug('comicyear : ' + watchchk['SeriesYear']) logger.fdebug('comicyear : %s' % watchchk['SeriesYear'])
#logger.info('publisher : ' + watchchk['IssuePublisher']) <-- no publisher in table #logger.info('publisher : ' + watchchk['IssuePublisher']) <-- no publisher in table
logger.fdebug('SARC : ' + SARC) logger.fdebug('SARC : %s' % SARC)
logger.fdebug('IssueArcID : ' + IssueArcID) logger.fdebug('IssueArcID : %s' % IssueArcID)
foundcom, prov = search.search_init(ComicName=watchchk['ComicName'], IssueNumber=watchchk['IssueNumber'], ComicYear=issueyear, SeriesYear=watchchk['SeriesYear'], Publisher=None, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True) passinfo = {'issueid': s_issueid,
'comicname': watchchk['ComicName'],
'seriesyear': watchchk['SeriesYear'],
'comicid': s_comicid,
'issuenumber': watchchk['IssueNumber'],
'booktype': watchchk['Type']}
#foundcom, prov = search.search_init(ComicName=watchchk['ComicName'], IssueNumber=watchchk['IssueNumber'], ComicYear=issueyear, SeriesYear=watchchk['SeriesYear'], Publisher=None, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True)
else: else:
# it's a watched series # it's a watched series
s_comicid = issuechk['ComicID'] s_comicid = issuechk['ComicID']
s_issueid = issuechk['IssueID'] s_issueid = issuechk['IssueID']
logger.fdebug("-- watched series queue.") logger.fdebug('-- watched series queue.')
logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number'])) logger.fdebug('%s -- #%s' % (issuechk['ComicName'], issuechk['Issue_Number']))
foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None) passinfo = {'issueid': s_issueid,
if foundcom['status'] is True: 'comicname': issuechk['ComicName'],
updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID) 'seriesyear': issuechk['SeriesYear'],
else: 'comicid': s_comicid,
logger.fdebug('Watchlist issue not sucessfully found') 'issuenumber': issuechk['Issue_Number'],
logger.fdebug('issuearcid: ' + str(IssueArcID)) 'booktype': issuechk['Type']}
logger.fdebug('issueid: ' + str(s_issueid)) #foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
stupdate.append({"Status": "Wanted",
"IssueArcID": IssueArcID, mylar.SEARCH_QUEUE.put(passinfo)
"IssueID": s_issueid})
#if foundcom['status'] is True:
# updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
#else:
# logger.fdebug('Watchlist issue not sucessfully found')
# logger.fdebug('issuearcid: %s' % IssueArcID)
# logger.fdebug('issueid: %s' % s_issueid)
# stupdate.append({"Status": "Wanted",
# "IssueArcID": IssueArcID,
# "IssueID": s_issueid})
if len(stupdate) > 0: if len(stupdate) > 0:
logger.fdebug(str(len(stupdate)) + ' issues need to get updated to Wanted Status') logger.fdebug('%s issues need to get updated to Wanted Status' % len(stupdate))
for st in stupdate: for st in stupdate:
ctrlVal = {'IssueArcID': st['IssueArcID']} ctrlVal = {'IssueArcID': st['IssueArcID']}
newVal = {'Status': st['Status']} newVal = {'Status': st['Status']}
if st['IssueID']: if st['IssueID']:
if st['IssueID']: if st['IssueID']:
logger.fdebug('issueid:' + str(st['IssueID'])) logger.fdebug('issueid: %s' %st['IssueID'])
newVal['IssueID'] = st['IssueID'] newVal['IssueID'] = st['IssueID']
myDB.upsert("storyarcs", newVal, ctrlVal) myDB.upsert("storyarcs", newVal, ctrlVal)
ReadGetWanted.exposed = True ReadGetWanted.exposed = True
@ -5672,18 +5748,23 @@ class WebInterface(object):
testslack.exposed = True testslack.exposed = True
def testrtorrent(self, host, username, password, auth, verify, ssl, rpc_url): def testrtorrent(self, host, username, password, auth, verify, rpc_url):
import torrent.clients.rtorrent as TorClient import torrent.clients.rtorrent as TorClient
client = TorClient.TorrentClient() client = TorClient.TorrentClient()
ca_bundle = None ca_bundle = None
if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None: if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None:
ca_bundle = mylar.CONFIG.RTORRENT_CA_BUNDLE ca_bundle = mylar.CONFIG.RTORRENT_CA_BUNDLE
if not client.connect(host, username, password, auth, verify, ssl, rpc_url, ca_bundle): rclient = client.connect(host, username, password, auth, verify, rpc_url, ca_bundle, test=True)
if not rclient:
logger.warn('Could not establish connection to %s' % host) logger.warn('Could not establish connection to %s' % host)
return 'Error establishing connection to Rtorrent' return '[rTorrent] Error establishing connection to Rtorrent'
else: else:
logger.info('Successfully validated connection to %s' % host) if rclient['status'] is False:
return "Successfully validated connection to %s" % host logger.warn('[rTorrent] Could not establish connection to %s. Error returned: %s' % (host, rclient['error']))
return 'Error establishing connection to rTorrent'
else:
logger.info('[rTorrent] Successfully validated connection to %s [v%s]' % (host, rclient['version']))
return 'Successfully validated rTorrent connection'
testrtorrent.exposed = True testrtorrent.exposed = True
def testqbit(self, host, username, password): def testqbit(self, host, username, password):
@ -5807,6 +5888,17 @@ class WebInterface(object):
test_32p.exposed = True test_32p.exposed = True
def check_ActiveDDL(self):
myDB = db.DBConnection()
active = myDB.selectone("SELECT * FROM DDL_INFO WHERE STATUS = 'Downloading'").fetchone()
if active is None:
return "There are no active downloads currently being attended to"
else:
filesize = os.stat(os.path.join(mylar.CONFIG.DDL_LOCATION, active['filename'])).st_size
cmath = int(float(filesize*100)/int(int(active['remote_filesize'])*100) * 100)
return "%s%s" % (cmath, '%')
check_ActiveDDL.exposed = True
def create_readlist(self, list=None, weeknumber=None, year=None): def create_readlist(self, list=None, weeknumber=None, year=None):
# ({ # ({
# "PUBLISHER": weekly['PUBLISHER'], # "PUBLISHER": weekly['PUBLISHER'],