FIX: Corrected an issue where arc issues were being set to a status of None when an arc was initially added
IMP: Changed arc searches and one-off pull-list searches to both use the search queue, and other queues as required
IMP: Changed 'search for missing' in the arc options to follow the search-queue workflow
IMP: Allowed arc / pull-list one-offs to be searched based on one-shot inclusion (i.e. no issue number for one-shot issues)
IMP: Formatted logging lines for arcs to avoid logging errors
IMP: Added code to allow requeuing of failed DDL downloads so that they resume from the point of failure
IMP: Added code to display the current percentage of DDL progress
IMP: Added a 'clear status' option to arc details to clear the current status of a given issue within an arc
FIX: Series within an arc that don't populate a year properly will still have a year designated
IMP: Removed the SSL tick box from the rTorrent GUI configuration; if SSL was previously enabled, the host will be auto-corrected to use https on initial startup if required

IMP: When adding a series, if there's only one issue available (and it's not ongoing), mark it as a one-shot edition to allow snatches with no issue number
IMP: Ignore Week+/Week packs for the time being when using the DDL provider option
FIX: When the weekly pull could not be retrieved, Mylar would temporarily drop to the alt_pull=0 option; if the config was saved in that state, the current alt_pull setting would be overwritten, causing subsequent problems when retrieving the pull
FIX: Fixed some post-processing problems when post-processing story-arc issues
evilhero 2019-02-19 12:02:05 -05:00
parent 47477eea26
commit e0f62ba173
12 changed files with 456 additions and 290 deletions
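The DDL resume support added in getcomics.py below follows the standard HTTP Range pattern: check how many bytes of the partial file already exist on disk, request only the remainder via a Range header, and append to the existing file rather than overwriting it. A minimal standalone sketch of that pattern, using plain requests rather than the cfscrape session the actual code uses; the function name, URL, and path here are illustrative only:

import os
import requests

def resume_download(url, path, chunk_size=1024):
    # How much of the file do we already have on disk?
    offset = os.path.getsize(path) if os.path.isfile(path) else 0
    headers = {}
    if offset:
        # Ask the server for the remaining bytes only.
        headers['Range'] = 'bytes=%d-' % offset
    r = requests.get(url, headers=headers, stream=True)
    # 206 Partial Content means the server honoured the Range request;
    # a plain 200 means it restarted from byte 0, so overwrite instead.
    mode = 'ab' if r.status_code == 206 else 'wb'
    with open(path, mode) as f:
        for chunk in r.iter_content(chunk_size=chunk_size):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)
    return path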

View File

@@ -584,14 +584,12 @@
<div class="row">
<label>rTorrent Host:port(optional)</label>
<input type="text" id="rtorrent_host" name="rtorrent_host" value="${config['rtorrent_host']}" size="30">
<small>ie. my.rtorrent:80, 192.168.1.1, scgi://localhost:5000</small>
</div>
<div class="row checkbox left clearfix">
<input id="rtorrent_ssl" type="checkbox" onclick="initConfigCheckbox($this));" name="rtorrent_ssl" value="1" ${config['rtorrent_ssl']} /><label>SSL</label>
<small>ie. https://my.rtorrent, http://192.168.1.1, http://localhost:80, scgi://localhost:5000</small>
</div>
<div class="config">
<div class="row checkbox left clearfix">
<input id="rtorrent_verify" type="checkbox" name="rtorrent_verify" value="1" ${config['rtorrent_verify']} /><label>Verify SSL</label>
<small>(only optionally used for https connections)</small>
</div>
</div>
<div class="row">
@@ -2200,10 +2198,9 @@
var password = document.getElementById("rtorrent_password").value;
var auth = document.getElementById("rtorrent_authentication").value;
var verify = document.getElementById("rtorrent_verify").value;
var ssl = document.getElementById("rtorrent_ssl").value;
var rpc_url = document.getElementById("rtorrent_rpc_url").value;
$.get("testrtorrent",
{ host: host, username: username, password: password, auth: auth, verify: verify, ssl: ssl, rpc_url: rpc_url },
{ host: host, username: username, password: password, auth: auth, verify: verify, rpc_url: rpc_url },
function(data){
if (data.error != undefined) {
alert(data.error);
@@ -2480,7 +2477,6 @@
initConfigCheckbox("#enforce_perms");
initConfigCheckbox("#enable_api");
initConfigCheckbox("#sab_to_mylar");
initConfigCheckbox("#rtorrent_ssl");
initConfigCheckbox("#usenewznab");
initConfigCheckbox("#enable_torznab");
initConfigCheckbox("#usenzbsu");

View File

@@ -208,6 +208,7 @@
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a>
%elif item['Status'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a>
<a href="#" onclick="doAjaxCall('clear_arcstatus?issuearcid=${item['IssueArcID']}',$(this),'table')"><data success="Clearing status of ${item['Status']} for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Clear Status</a>
%elif item['Status'] == 'Downloaded' and item['Location'] is not None:
<a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a>
%endif

View File

@@ -202,6 +202,7 @@
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Now searching for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Grab</a>
%elif item['Status'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${item['ComicName'] | u}&ComicIssue=${item['IssueNumber']}&ComicYear=${issuedate}&mode=readlist&SARC=${item['StoryArc']}&IssueArcID=${item['IssueArcID']}&SeriesYear=${item['SeriesYear']}',$(this),'table')" data-success="Trying to search again for issue"><span class="ui-icon ui-icon-plus"></span>Retry</a>
<a href="#" onclick="doAjaxCall('clear_arcstatus?issuearcid=${item['IssueArcID']}',$(this),'table')"><data success="Clearing status of ${item['Status']} for ${item['ComicName']} #${item['IssueNumber']}"><span class="ui-icon ui-icon-plus"></span>Clear Status</a>
%elif item['Status'] == 'Downloaded' and item['Location'] is not None:
<a href="downloadthis?pathfile=${item['Location'] |u}"><span class="ui-icon ui-icon-plus"></span>Download</a>
%endif

View File

@@ -143,7 +143,7 @@
<td class="options">
%if weekly['HAVEIT'] == 'OneOff':
%if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off">
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off">
%if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" />
%else:
@@ -193,7 +193,7 @@
<% dl = False %>
%endif
%if weekly['HAVEIT'] == 'No' and weekly['STATUS'] == 'Skipped':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="One off download">
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="One off download">
%if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/search.png" height="25" width="25" class="highqual" />
%else:
@@ -233,7 +233,7 @@
%endif
</a>
%elif weekly['STATUS'] == 'Snatched':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again">
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}&BookType=${weekly['FORMAT']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again">
%if mylar.CONFIG.SHOW_ICONS:
<img style="margin: 0px 5px" src="interfaces/default/images/retry.png" height="25" width="25" class="highqual" />
%else:

View File

@@ -1436,55 +1436,60 @@ class PostProcessor(object):
if (all([self.nzb_name != 'Manual Run', self.apicall is False]) or (self.oneoffinlist is True or all([self.issuearcid is not None, self.issueid is None]))) and not self.nzb_name.startswith('0-Day'): # and all([self.issueid is None, self.comicid is None, self.apicall is False]):
ppinfo = []
if self.oneoffinlist is False:
nzbname = self.nzb_name
#remove extensions from nzb_name if they somehow got through (Experimental most likely)
if nzbname.lower().endswith(self.extensions):
fd, ext = os.path.splitext(nzbname)
self._log("Removed extension from nzb: " + ext)
nzbname = re.sub(str(ext), '', str(nzbname))
#replace spaces
# let's change all space to decimals for simplicity
logger.fdebug('[NZBNAME]: ' + nzbname)
#gotta replace & or escape it
nzbname = re.sub("\&", 'and', nzbname)
nzbname = re.sub('[\,\:\?\'\+]', '', nzbname)
nzbname = re.sub('[\(\)]', ' ', nzbname)
logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname)
nzbname = re.sub('.cbr', '', nzbname).strip()
nzbname = re.sub('.cbz', '', nzbname).strip()
nzbname = re.sub('[\.\_]', ' ', nzbname).strip()
nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces.
logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname)
nzbname = re.sub('\s', '.', nzbname)
logger.fdebug('%s After conversions, nzbname is : %s' % (module, nzbname))
# if mylar.USE_NZBGET==1:
# nzbname=self.nzb_name
self._log("nzbname: %s" % nzbname)
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
self.oneoff = False
if nzbiss is None:
self._log("Failure - could not initially locate nzbfile in my database to rename.")
logger.fdebug('%s Failure - could not locate nzbfile initially' % module)
# if failed on spaces, change it all to decimals and try again.
nzbname = re.sub('[\(\)]', '', str(nzbname))
self._log("trying again with this nzbname: %s" % nzbname)
logger.fdebug('%s Trying to locate nzbfile again with nzbname of : %s' % (module, nzbname))
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
logger.error('%s Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.' % module)
#set it up to run manual post-processing on self.nzb_folder
self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.')
self.valreturn.append({"self.log": self.log,
"mode": 'outside'})
return self.queue.put(self.valreturn)
if any([self.issueid is not None, self.issuearcid is not None]):
if self.issueid is not None:
s_id = self.issueid
else:
self._log("I corrected and found the nzb as : %s" % nzbname)
logger.fdebug('%s Auto-corrected and found the nzb as : %s' % (module, nzbname))
#issueid = nzbiss['IssueID']
s_id = self.issuearcid
nzbiss = myDB.selectone('SELECT * FROM nzblog WHERE IssueID=?', [s_id]).fetchone()
else:
nzbname = self.nzb_name
#remove extensions from nzb_name if they somehow got through (Experimental most likely)
if nzbname.lower().endswith(self.extensions):
fd, ext = os.path.splitext(nzbname)
self._log("Removed extension from nzb: " + ext)
nzbname = re.sub(str(ext), '', str(nzbname))
#replace spaces
# let's change all space to decimals for simplicity
logger.fdebug('[NZBNAME]: ' + nzbname)
#gotta replace & or escape it
nzbname = re.sub("\&", 'and', nzbname)
nzbname = re.sub('[\,\:\?\'\+]', '', nzbname)
nzbname = re.sub('[\(\)]', ' ', nzbname)
logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname)
nzbname = re.sub('.cbr', '', nzbname).strip()
nzbname = re.sub('.cbz', '', nzbname).strip()
nzbname = re.sub('[\.\_]', ' ', nzbname).strip()
nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces.
logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname)
nzbname = re.sub('\s', '.', nzbname)
logger.fdebug('%s After conversions, nzbname is : %s' % (module, nzbname))
self._log("nzbname: %s" % nzbname)
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
self._log("Failure - could not initially locate nzbfile in my database to rename.")
logger.fdebug('%s Failure - could not locate nzbfile initially' % module)
# if failed on spaces, change it all to decimals and try again.
nzbname = re.sub('[\(\)]', '', str(nzbname))
self._log("trying again with this nzbname: %s" % nzbname)
logger.fdebug('%s Trying to locate nzbfile again with nzbname of : %s' % (module, nzbname))
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
if nzbiss is None:
logger.error('%s Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.' % module)
#set it up to run manual post-processing on self.nzb_folder
self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.')
self.valreturn.append({"self.log": self.log,
"mode": 'outside'})
return self.queue.put(self.valreturn)
else:
self._log("I corrected and found the nzb as : %s" % nzbname)
logger.fdebug('%s Auto-corrected and found the nzb as : %s' % (module, nzbname))
#issueid = nzbiss['IssueID']
issueid = nzbiss['IssueID']
logger.fdebug('%s Issueid: %s' % (module, issueid))
@@ -1940,7 +1945,8 @@ class PostProcessor(object):
'ComicName': tinfo['comicname'],
'IssueNumber': tinfo['issuenumber'],
'Publisher': tinfo['publisher'],
'OneOff': tinfo['oneoff']}
'OneOff': tinfo['oneoff'],
'ForcedMatch': False}
else:

View File

@@ -382,7 +382,7 @@ class Config(object):
count = sum(1 for line in open(self._config_file))
else:
count = 0
self.newconfig = 8
self.newconfig = 9
if count == 0:
CONFIG_VERSION = 0
MINIMALINI = False
@@ -502,13 +502,12 @@ class Config(object):
shutil.move(self._config_file, os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
except:
print('Unable to make proper backup of config file in %s' % os.path.join(mylar.DATA_DIR, 'config.ini.backup'))
if self.newconfig == 8:
if self.newconfig == 9:
print('Attempting to update configuration..')
#torznab multiple entries merged into extra_torznabs value
self.config_update()
setattr(self, 'CONFIG_VERSION', str(self.newconfig))
config.set('General', 'CONFIG_VERSION', str(self.newconfig))
print('Updating config to newest version : %s' % self.newconfig)
self.writeconfig()
else:
self.provider_sequence()
@@ -537,8 +536,8 @@ class Config(object):
return self
def config_update(self):
if self.newconfig == 8:
print('Updating Configuration from %s to %s' % (self.CONFIG_VERSION, self.newconfig))
print('Updating Configuration from %s to %s' % (self.CONFIG_VERSION, self.newconfig))
if self.CONFIG_VERSION < 8:
print('Checking for existing torznab configuration...')
if not any([self.TORZNAB_NAME is None, self.TORZNAB_HOST is None, self.TORZNAB_APIKEY is None, self.TORZNAB_CATEGORY is None]):
torznabs =[(self.TORZNAB_NAME, self.TORZNAB_HOST, self.TORZNAB_APIKEY, self.TORZNAB_CATEGORY, str(int(self.ENABLE_TORZNAB)))]
@@ -552,7 +551,14 @@ class Config(object):
config.remove_option('Torznab', 'torznab_apikey')
config.remove_option('Torznab', 'torznab_category')
config.remove_option('Torznab', 'torznab_verify')
print('Successfully removed old entries.')
print('Successfully removed outdated config entries.')
if self.newconfig == 9:
#rejig rtorrent settings due to change.
if all([self.RTORRENT_SSL is True, not self.RTORRENT_HOST.startswith('http')]):
self.RTORRENT_HOST = 'https://' + self.RTORRENT_HOST
config.set('Rtorrent', 'rtorrent_host', self.RTORRENT_HOST)
config.remove_option('Rtorrent', 'rtorrent_ssl')
print('Successfully removed outdated config entries.')
print('Configuration upgraded to version %s' % self.newconfig)
def check_section(self, section, key):
@@ -731,6 +737,11 @@ class Config(object):
def configure(self, update=False):
#force alt_pull = 2 on restarts regardless of settings
if self.ALT_PULL != 2:
self.ALT_PULL = 2
config.set('Weekly', 'alt_pull', str(self.ALT_PULL))
try:
if not any([self.SAB_HOST is None, self.SAB_HOST == '', 'http://' in self.SAB_HOST[:7], 'https://' in self.SAB_HOST[:8]]):
self.SAB_HOST = 'http://' + self.SAB_HOST

View File

@@ -116,6 +116,9 @@ class GC(object):
title = re.sub(issues, '', title).strip()
if title.endswith('#'):
title = title[:-1].strip()
else:
if any(['Marvel Week+' in title, 'INDIE Week+' in title, 'Image Week' in title, 'DC Week+' in title]):
continue
option_find = f.find("p", {"style": "text-align: center;"})
i = 0
@@ -156,7 +159,6 @@ class GC(object):
logger.fdebug('%s [%s]' % (title, size))
results['entries'] = resultlist
return results
def parse_downloadresults(self, id, mainlink):
@@ -236,13 +238,15 @@ class GC(object):
logger.fdebug('[%s] %s - %s' % (x['site'], x['volume'], x['link']))
ctrlval = {'id': id}
vals = {'series': series,
'year': year,
'size': size,
'issueid': self.issueid,
'comicid': self.comicid,
'link': link,
'status': 'Queued'}
vals = {'series': series,
'year': year,
'size': size,
'issueid': self.issueid,
'comicid': self.comicid,
'link': link,
'mainlink': mainlink,
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),
'status': 'Queued'}
myDB.upsert('ddl_info', vals, ctrlval)
mylar.DDL_QUEUE.put({'link': link,
@@ -252,11 +256,12 @@ class GC(object):
'size': size,
'comicid': self.comicid,
'issueid': self.issueid,
'id': id})
'id': id,
'resume': None})
return {'success': True}
def downloadit(self, id, link, mainlink):
def downloadit(self, id, link, mainlink, resume=None):
if mylar.DDL_LOCK is True:
logger.fdebug('[DDL] Another item is currently downloading via DDL. Only one item can be downloaded at a time using DDL. Patience.')
return
@@ -267,25 +272,47 @@ class GC(object):
filename = None
try:
with cfscrape.create_scraper() as s:
if resume is not None:
logger.info('[DDL-RESUME] Attempting to resume from: %s bytes' % resume)
self.headers['Range'] = 'bytes=%d-' % resume
cf_cookievalue, cf_user_agent = s.get_tokens(mainlink, headers=self.headers)
t = s.get(link, verify=True, cookies=cf_cookievalue, headers=self.headers, stream=True)
filename = os.path.basename(urllib.unquote(t.url).decode('utf-8'))
path = os.path.join(mylar.CONFIG.DDL_LOCATION, filename)
try:
remote_filesize = int(t.headers['Content-length'])
logger.fdebug('remote filesize: %s' % remote_filesize)
except Exception as e:
logger.warn('[WARNING] Unable to retrieve remote file size. Error returned as : %s' % e)
remote_filesize = 0
mylar.DDL_LOCK = False
return ({"success": False,
"filename": filename,
"path": None})
else:
#write the filename to the db for tracking purposes...
myDB.upsert('ddl_info', {'filename': filename, 'remote_filesize': remote_filesize}, {'id': id})
#write the filename to the db for tracking purposes...
myDB.upsert('ddl_info', {'filename': filename}, {'id': id})
path = os.path.join(mylar.CONFIG.DDL_LOCATION, filename)
if t.headers.get('content-encoding') == 'gzip': #.get('Content-Encoding') == 'gzip':
buf = StringIO(t.content)
f = gzip.GzipFile(fileobj=buf)
with open(path, 'wb') as f:
for chunk in t.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
if resume is not None:
with open(path, 'ab') as f:
for chunk in t.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
else:
with open(path, 'wb') as f:
for chunk in t.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
except Exception as e:
logger.error('[ERROR] %s' % e)

View File

@@ -3045,14 +3045,18 @@ def ddl_downloader(queue):
#write this to the table so we have a record of what's going on.
ctrlval = {'id': item['id']}
val = {'status': 'Downloading'}
val = {'status': 'Downloading',
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', val, ctrlval)
ddz = getcomics.GC()
ddzstat = ddz.downloadit(item['id'], item['link'], item['mainlink'])
ddzstat = ddz.downloadit(item['id'], item['link'], item['mainlink'], item['resume'])
nval = {'status': 'Completed'}
myDB.upsert('ddl_info', nval, ctrlval)
if ddzstat['success'] is True:
tdnow = datetime.datetime.now()
nval = {'status': 'Completed',
'updated_date': tdnow.strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', nval, ctrlval)
if all([ddzstat['success'] is True, mylar.CONFIG.POST_PROCESSING is True]):
try:
@@ -3076,10 +3080,13 @@ def ddl_downloader(queue):
'ddl': True})
except Exception as e:
logger.info('process error: %s [%s]' %(e, ddzstat))
elif mylar.CONFIG.POST_PROCESSING is True:
elif all([ddzstat['success'] is True, mylar.CONFIG.POST_PROCESSING is False]):
logger.info('File successfully downloaded. Post Processing is not enabled - item retained here: %s' % os.path.join(ddzstat['path'],ddzstat['filename']))
else:
logger.info('[Status: %s] Failed to download: %s ' % (ddzstat['success'], ddzstat))
nval = {'status': 'Failed',
'updated_date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M')}
myDB.upsert('ddl_info', nval, ctrlval)
def postprocess_main(queue):
while True:

View File

@@ -291,6 +291,15 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
else:
aliases = aliases
logger.fdebug('comicIssues: %s' % comicIssues)
logger.fdebug('seriesyear: %s / currentyear: %s' % (SeriesYear, helpers.today()[:4]))
logger.fdebug('comicType: %s' % comic['Type'])
if all([int(comicIssues) == 1, SeriesYear < helpers.today()[:4], comic['Type'] != 'One-Shot', comic['Type'] != 'TPB']):
logger.info('Determined to be a one-shot issue. Forcing Edition to One-Shot')
booktype = 'One-Shot'
else:
booktype = comic['Type']
controlValueDict = {"ComicID": comicid}
newValueDict = {"ComicName": comic['ComicName'],
"ComicSortName": sortname,
@@ -309,7 +318,7 @@ def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=No
"AlternateSearch": aliases,
# "ComicPublished": gcdinfo['resultPublished'],
"ComicPublished": "Unknown",
"Type": comic['Type'],
"Type": booktype,
"Corrected_Type": comic['Corrected_Type'],
"Collects": issue_list,
"DateAdded": helpers.today(),

View File

@@ -41,7 +41,6 @@ class RTorrent(object):
mylar.CONFIG.RTORRENT_PASSWORD,
mylar.CONFIG.RTORRENT_AUTHENTICATION,
mylar.CONFIG.RTORRENT_VERIFY,
mylar.CONFIG.RTORRENT_SSL,
mylar.CONFIG.RTORRENT_RPC_URL,
mylar.CONFIG.RTORRENT_CA_BUNDLE):
logger.error('[ERROR] Could not connect to %s - exiting' % mylar.CONFIG.RTORRENT_HOST)

View File

@@ -23,23 +23,37 @@ class TorrentClient(object):
# Use default ssl verification
return True
def connect(self, host, username, password, auth, verify, ssl, rpc_url, ca_bundle):
def connect(self, host, username, password, auth, verify, rpc_url, ca_bundle, test=False):
if self.conn is not None:
return self.conn
if not host:
return False
return {'status': False, 'error': 'No host specified'}
url = helpers.cleanHost(host, protocol = True, ssl = ssl)
url = host
if host.startswith('https:'):
ssl = True
else:
if not host.startswith('http://'):
url = 'http://' + url
ssl = False
#add on the slash ..
if not url.endswith('/'):
url += '/'
#url = helpers.cleanHost(host, protocol = True, ssl = ssl)
# Automatically add '+https' to 'httprpc' protocol if SSL is enabled
if ssl is True and url.startswith('httprpc://'):
url = url.replace('httprpc://', 'httprpc+https://')
#if ssl is True and url.startswith('httprpc://'):
# url = url.replace('httprpc://', 'httprpc+https://')
#if ssl is False and not url.startswith('http://'):
# url = 'http://' + url
parsed = urlparse(url)
#parsed = urlparse(url)
# rpc_url is only used on http/https scgi pass-through
if parsed.scheme in ['http', 'https']:
if rpc_url is not None:
url += rpc_url
#logger.fdebug(url)
@@ -52,8 +66,8 @@ class TorrentClient(object):
verify_ssl=self.getVerifySsl(verify, ca_bundle)
)
except Exception as err:
logger.error('Failed to connect to rTorrent: %s', err)
return False
logger.error('Make sure you have the right protocol specified for the rtorrent host. Failed to connect to rTorrent - error: %s.' % err)
return {'status': False, 'error': err}
else:
logger.fdebug('NO username %s / NO password %s' % (username, password))
try:
@@ -63,10 +77,13 @@ class TorrentClient(object):
verify_ssl=self.getVerifySsl(verify, ca_bundle)
)
except Exception as err:
logger.error('Failed to connect to rTorrent: %s', err)
return False
logger.error('Failed to connect to rTorrent: %s' % err)
return {'status': False, 'error': err}
return self.conn
if test is True:
return {'status': True, 'version': self.conn.get_client_version()}
else:
return self.conn
def find_torrent(self, hash):
return self.conn.find_torrent(hash)

View File

@@ -516,7 +516,7 @@ class WebInterface(object):
except Exception, e:
logger.warn('Unable to download image from CV URL link - possibly no arc picture is present: %s' % imageurl)
else:
logger.fdebug('comic image retrieval status code: ' + str(r.status_code))
logger.fdebug('comic image retrieval status code: %s' % r.status_code)
if str(r.status_code) != '200':
logger.warn('Unable to download image from CV URL link: %s [Status Code returned: %s]' % (imageurl, r.status_code))
@@ -532,8 +532,8 @@ class WebInterface(object):
f.flush()
arc_results = mylar.cv.getComic(comicid=None, type='issue', arcid=arcid, arclist=arclist)
logger.fdebug(module + ' Arcresults: ' + str(arc_results))
logger.fdebug('arclist: ' + str(arclist))
logger.fdebug('%s Arcresults: %s' % (module, arc_results))
logger.fdebug('%s Arclist: %s' % (module, arclist))
if len(arc_results) > 0:
import random
@@ -610,7 +610,7 @@ class WebInterface(object):
readingorder = int(re.sub('[\,\|]','', rosre).strip())
else:
readingorder = 0
logger.fdebug('[' + str(readingorder) + '] issueid:' + str(issid) + ' - findorder#:' + str(findorder))
logger.fdebug('[%s] issueid: %s - findorder#: %s' % (readingorder, issid, findorder))
issuedata.append({"ComicID": comicid,
"IssueID": issid,
@@ -628,7 +628,7 @@ class WebInterface(object):
"Manual": manual_mod})
n+=1
comicid_results = mylar.cv.getComic(comicid=None, type='comicyears', comicidlist=cidlist)
logger.fdebug(module + ' Initiating issue updating - just the info')
logger.fdebug('%s Initiating issue updating - just the info' % module)
for AD in issuedata:
seriesYear = 'None'
@@ -1335,9 +1335,9 @@ class WebInterface(object):
threading.Thread(target=self.queueissue, kwargs=kwargs).start()
queueit.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None, pullweek=None, pullyear=None, manual=False, ComicVersion=None):
logger.fdebug('ComicID:' + str(ComicID))
logger.fdebug('mode:' + str(mode))
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None, pullweek=None, pullyear=None, manual=False, ComicVersion=None, BookType=None):
logger.fdebug('ComicID: %s' % ComicID)
logger.fdebug('mode: %s' % mode)
now = datetime.datetime.now()
myDB = db.DBConnection()
#mode dictates type of queue - either 'want' for individual comics, or 'series' for series watchlist.
@@ -1359,8 +1359,8 @@ class WebInterface(object):
SARC = True
IssueArcID = None
else:
logger.info(u"Story Arc : " + str(SARC) + " queueing selected issue...")
logger.info(u"IssueArcID : " + str(IssueArcID))
logger.info('Story Arc : %s queueing selected issue...' % SARC)
logger.fdebug('IssueArcID : %s' % IssueArcID)
#try to load the issue dates - can now sideload issue details.
dateload = myDB.selectone('SELECT * FROM storyarcs WHERE IssueArcID=?', [IssueArcID]).fetchone()
if dateload is None:
@@ -1373,25 +1373,24 @@ class WebInterface(object):
ReleaseDate = dateload['ReleaseDate']
Publisher = dateload['IssuePublisher']
SeriesYear = dateload['SeriesYear']
BookType = dateload['Type']
if ComicYear is None: ComicYear = SeriesYear
if dateload['Volume'] is None:
logger.info('Marking ' + ComicName + ' #' + ComicIssue + ' as wanted...')
logger.info('Marking %s #%s as wanted...' % (ComicName, ComicIssue))
else:
logger.info('Marking ' + ComicName + ' (' + dateload['Volume'] + ') #' + ComicIssue + ' as wanted...')
logger.fdebug('publisher: ' + Publisher)
logger.info('Marking %s (%s) #%s as wanted...' % (ComicName, dateload['Volume'], ComicIssue))
logger.fdebug('publisher: %s' % Publisher)
controlValueDict = {"IssueArcID": IssueArcID}
newStatus = {"Status": "Wanted"}
myDB.upsert("storyarcs", newStatus, controlValueDict)
logger.info('[STORY-ARCS] Now Queuing %s (%s) #%s for search' % (ComicName, ComicYear, ComicIssue))
s = mylar.SEARCH_QUEUE.put({'issueid': IssueArcID, 'comicname': ComicName, 'seriesyear': ComicYear, 'comicid': ComicID, 'issuenumber': ComicIssue})
#foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=ReleaseDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=dateload['Volume'], SARC=SARC, IssueArcID=IssueArcID)
#if foundcom['status'] is True:
# logger.info(u"Downloaded " + ComicName + " #" + ComicIssue + " (" + str(ComicYear) + ")")
# controlValueDict = {"IssueArcID": IssueArcID}
# newStatus = {"Status": "Snatched"}
#myDB.upsert("storyarcs", newStatus, controlValueDict)
return # foundcom
moduletype = '[STORY-ARCS]'
passinfo = {'issueid': IssueArcID,
'comicname': ComicName,
'seriesyear': SeriesYear,
'comicid': ComicID,
'issuenumber': ComicIssue,
'booktype': BookType}
elif mode == 'pullwant': #and ComicID is None
#this is for marking individual comics from the pullist to be downloaded.
@@ -1400,19 +1399,17 @@ class WebInterface(object):
#better to set both to some generic #, and then filter out later...
IssueDate = pullinfo
try:
ComicYear = IssueDate[:4]
SeriesYear = IssueDate[:4]
except:
ComicYear == now.year
SeriesYear == now.year
if Publisher == 'COMICS': Publisher = None
logger.info('Now Queuing %s %s for search' % (ComicName, ComicIssue))
s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': ComicYear, 'comicid': ComicID, 'issuenumber': ComicIssue})
#foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=IssueDate, IssueID=IssueID, ComicID=ComicID, AlternateSearch=None, mode=mode, UseFuzzy=None, ComicVersion=ComicVersion, allow_packs=False, manual=manual)
if manual is True:
return foundcom
#if foundcom['status'] is True:
#logger.info('[ONE-OFF MODE] Successfully Downloaded ' + ComicName + ' ' + ComicIssue)
#return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'], pullinfo={'weeknumber': pullweek, 'year': pullyear})
return
moduletype = '[PULL-LIST]'
passinfo = {'issueid': IssueID,
'comicname': ComicName,
'seriesyear': SeriesYear,
'comicid': ComicID,
'issuenumber': ComicIssue,
'booktype': BookType}
elif mode == 'want' or mode == 'want_ann' or manualsearch:
cdname = myDB.selectone("SELECT * from comics where ComicID=?", [ComicID]).fetchone()
@@ -1430,9 +1427,9 @@ class WebInterface(object):
newStatus = {"Status": "Wanted"}
if mode == 'want':
if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' issue: ' + ComicIssue)
logger.info('Initiating manual search for %s issue: %s' % (ComicName, ComicIssue))
else:
logger.info(u"Marking " + ComicName + " issue: " + ComicIssue + " as wanted...")
logger.info('Marking %s issue: %s as wanted...' % (ComicName, ComicIssue))
myDB.upsert("issues", newStatus, controlValueDict)
else:
annual_name = myDB.selectone("SELECT * FROM annuals WHERE ComicID=? and IssueID=?", [ComicID, IssueID]).fetchone()
@@ -1442,55 +1439,49 @@ class WebInterface(object):
ComicName = annual_name['ReleaseComicName']
if manualsearch:
logger.info('Initiating manual search for ' + ComicName + ' : ' + ComicIssue)
logger.info('Initiating manual search for %s : %s' % (ComicName, ComicIssue))
else:
logger.info(u"Marking " + ComicName + " : " + ComicIssue + " as wanted...")
logger.info('Marking %s : %s as wanted...' % (ComicName, ComicIssue))
myDB.upsert("annuals", newStatus, controlValueDict)
#---
#this should be on it's own somewhere
#if IssueID is not None:
# controlValueDict = {"IssueID": IssueID}
# newStatus = {"Status": "Wanted"}
# myDB.upsert("issues", newStatus, controlValueDict)
#for future reference, the year should default to current year (.datetime)
if mode == 'want':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM issues WHERE IssueID=?", [IssueID]).fetchone()
elif mode == 'want_ann':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
if ComicYear == None:
ComicYear = str(issues['IssueDate'])[:4]
if issues['ReleaseDate'] is None or issues['ReleaseDate'] == '0000-00-00':
logger.info('No Store Date found for given issue. This is probably due to not Refreshing the Series beforehand.')
logger.info('I Will assume IssueDate as Store Date, but you should probably Refresh the Series and try again if required.')
storedate = issues['IssueDate']
else:
storedate = issues['ReleaseDate']
#miy = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [ComicID]).fetchone()
#SeriesYear = miy['ComicYear']
#AlternateSearch = miy['AlternateSearch']
#Publisher = miy['ComicPublisher']
#UseAFuzzy = miy['UseFuzzy']
#ComicVersion = miy['ComicVersion']
moduletype = '[WANTED-SEARCH]'
passinfo = {'issueid': IssueID,
'comicname': ComicName,
'seriesyear': SeriesYear,
'comicid': ComicID,
'issuenumber': ComicIssue,
'booktype': BookType}
if mode == 'want':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM issues WHERE IssueID=?", [IssueID]).fetchone()
elif mode == 'want_ann':
issues = myDB.selectone("SELECT IssueDate, ReleaseDate FROM annuals WHERE IssueID=?", [IssueID]).fetchone()
if ComicYear == None:
ComicYear = str(issues['IssueDate'])[:4]
if issues['ReleaseDate'] is None or issues['ReleaseDate'] == '0000-00-00':
logger.info('No Store Date found for given issue. This is probably due to not Refreshing the Series beforehand.')
logger.info('I Will assume IssueDate as Store Date, but you should probably Refresh the Series and try again if required.')
storedate = issues['IssueDate']
else:
storedate = issues['ReleaseDate']
if BookType == 'TPB':
logger.info('[%s] Now Queueing %s (%s) for search' % (BookType, ComicName, SeriesYear))
logger.info('%s[%s] Now Queueing %s (%s) for search' % (moduletype, BookType, ComicName, SeriesYear))
elif ComicIssue is None:
logger.info('Now Queueing %s (%s) for search' % (ComicName, SeriesYear))
logger.info('%s Now Queueing %s (%s) for search' % (moduletype, ComicName, SeriesYear))
else:
logger.info('Now Queueing %s (%s) #%s for search' % (ComicName, SeriesYear, ComicIssue))
s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': SeriesYear, 'comicid': ComicID, 'issuenumber': ComicIssue, 'booktype': BookType})
# foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, Publisher, issues['IssueDate'], storedate, IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID, manualsearch=manualsearch, filesafe=ComicName_Filesafe, allow_packs=AllowPacks, torrentid_32p=TorrentID_32p)
# if foundcom['status'] is True:
# # file check to see if issue exists and update 'have' count
# if IssueID is not None:
# logger.info("passing to updater.")
# return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'])
logger.info('%s Now Queueing %s (%s) #%s for search' % (moduletype, ComicName, SeriesYear, ComicIssue))
#s = mylar.SEARCH_QUEUE.put({'issueid': IssueID, 'comicname': ComicName, 'seriesyear': SeriesYear, 'comicid': ComicID, 'issuenumber': ComicIssue, 'booktype': BookType})
s = mylar.SEARCH_QUEUE.put(passinfo)
if manualsearch:
# if it's a manual search, return to null here so the thread will die and not cause http redirect errors.
return
if ComicID:
return cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
else:
raise cherrypy.HTTPRedirect(redirect)
return
#raise cherrypy.HTTPRedirect(redirect)
queueissue.exposed = True
def unqueueissue(self, IssueID, ComicID, ComicName=None, Issue=None, FutureID=None, mode=None, ReleaseComicID=None):
@@ -2198,6 +2189,25 @@ class WebInterface(object):
annualDelete.exposed = True
def ddl_requeue(self, id, mode):
myDB = db.DBConnection()
item = myDB.selectone("SELECT * FROM DDL_INFO WHERE ID=?", [id]).fetchone()
if item is not None:
if mode == 'resume':
if item['status'] != 'Completed':
filesize = os.stat(os.path.join(mylar.CONFIG.DDL_LOCATION, item['filename'])).st_size
mylar.DDL_QUEUE.put({'link': item['link'],
'mainlink': item['mainlink'],
'series': item['series'],
'year': item['year'],
'size': item['size'],
'comicid': item['comicid'],
'issueid': item['issueid'],
'id': item['id'],
'resume': filesize})
ddl_requeue.exposed = True
def queueManage(self): # **args):
myDB = db.DBConnection()
activelist = 'There are currently no items currently downloading via Direct Download (DDL).'
@@ -2211,22 +2221,28 @@ class WebInterface(object):
'id': active['id']}
resultlist = 'There are currently no items waiting in the Direct Download (DDL) Queue for processing.'
s_info = myDB.select("SELECT a.ComicName, a.ComicVersion, a.ComicID, a.ComicYear, b.Issue_Number, b.IssueID, c.size, c.status, c.id FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID INNER JOIN ddl_info as c ON b.IssueID = c.IssueID WHERE c.status != 'Downloading'")
s_info = myDB.select("SELECT a.ComicName, a.ComicVersion, a.ComicID, a.ComicYear, b.Issue_Number, b.IssueID, c.size, c.status, c.id, c.updated_date FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID INNER JOIN ddl_info as c ON b.IssueID = c.IssueID WHERE c.status != 'Downloading'")
if s_info:
resultlist = []
for si in s_info:
issue = si['Issue_Number']
if issue is not None:
issue = '#%s' % issue
resultlist.append({'series': si['ComicName'],
'issue': issue,
'id': si['id'],
'volume': si['ComicVersion'],
'year': si['ComicYear'],
'size': si['size'].strip(),
'comicid': si['ComicID'],
'issueid': si['IssueID'],
'status': si['status']})
issue = '#%s' % issue
if si['status'] == 'Completed':
si_status = '100%'
else:
si_status = ''
resultlist.append({'series': si['ComicName'],
'issue': issue,
'id': si['id'],
'volume': si['ComicVersion'],
'year': si['ComicYear'],
'size': si['size'].strip(),
'comicid': si['ComicID'],
'issueid': si['IssueID'],
'status': si['status'],
'updated_date': si['updated_date'],
'progress': si_status})
logger.info('resultlist: %s' % resultlist)
return serve_template(templatename="queue_management.html", title="Queue Management", activelist=activelist, resultlist=resultlist)
@@ -2764,6 +2780,12 @@ class WebInterface(object):
return serve_template(templatename="readinglist.html", title="Reading Lists", issuelist=readlist, counts=counts)
readlist.exposed = True
def clear_arcstatus(self, issuearcid=None):
myDB = db.DBConnection()
myDB.upsert('storyarcs', {'Status': 'Skipped'}, {'IssueArcID': issuearcid})
logger.info('Status set to Skipped.')
clear_arcstatus.exposed = True
def storyarc_main(self, arcid=None):
myDB = db.DBConnection()
arclist = []
@@ -2843,7 +2865,7 @@ class WebInterface(object):
elif lowyear == maxyear:
spanyears = str(maxyear)
else:
spanyears = str(lowyear) + ' - ' + str(maxyear)
spanyears = '%s - %s' % (lowyear, maxyear)
sdir = helpers.arcformat(arcinfo[0]['StoryArc'], spanyears, arcpub)
@@ -3311,18 +3333,18 @@ class WebInterface(object):
elif lowyear == maxyear:
spanyears = str(maxyear)
else:
spanyears = str(lowyear) + ' - ' + str(maxyear)
spanyears = '%s - %s' % (lowyear, maxyear)
logger.info('arcpub: ' + arcpub)
logger.info('arcpub: %s' % arcpub)
dstloc = helpers.arcformat(arcdir, spanyears, arcpub)
filelist = None
if dstloc is not None:
if not os.path.isdir(dstloc):
if mylar.CONFIG.STORYARCDIR:
logger.info('Story Arc Directory [' + dstloc + '] does not exist! - attempting to create now.')
logger.info('Story Arc Directory [%s] does not exist! - attempting to create now.' % dstloc)
else:
logger.info('Story Arc Grab-Bag Directory [' + dstloc + '] does not exist! - attempting to create now.')
logger.info('Story Arc Grab-Bag Directory [%s] does not exist! - attempting to create now.' % dstloc)
checkdirectory = filechecker.validateAndCreateDirectory(dstloc, True)
if not checkdirectory:
logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
@@ -3346,7 +3368,7 @@ class WebInterface(object):
fchk = filechecker.FileChecker(dir=dstloc, watchcomic=None, Publisher=None, sarc='true', justparse=True)
filechk = fchk.listFiles()
fccnt = filechk['comiccount']
logger.fdebug('[STORY ARC DIRECTORY] ' + str(fccnt) + ' files exist within this directory.')
logger.fdebug('[STORY ARC DIRECTORY] %s files exist within this directory.' % fccnt)
if fccnt > 0:
filelist = filechk['comiclist']
logger.info(filechk)
@@ -3357,11 +3379,14 @@ class WebInterface(object):
sarc_title = None
showonreadlist = 1 # 0 won't show storyarcissues on storyarcs main page, 1 will show
for arc in ArcWatch:
newStatus = 'Skipped'
if arc['Manual'] == 'deleted':
continue
sarc_title = arc['StoryArc']
logger.fdebug('[' + arc['StoryArc'] + '] ' + arc['ComicName'] + ' : ' + arc['IssueNumber'])
logger.fdebug('[%s] %s : %s' % (arc['StoryArc'], arc['ComicName'], arc['IssueNumber']))
matcheroso = "no"
#fc = filechecker.FileChecker(watchcomic=arc['ComicName'])
@@ -3379,29 +3404,45 @@ class WebInterface(object):
# if it's a multi-volume series, it's decimalized - let's get rid of the decimal.
GCDissue, whocares = helpers.decimal_issue(arc['IssueNumber'])
GCDissue = int(GCDissue) / 1000
if '.' not in str(GCDissue): GCDissue = str(GCDissue) + ".00"
logger.fdebug("issue converted to " + str(GCDissue))
if '.' not in str(GCDissue):
GCDissue = '%s.00' % GCDissue
logger.fdebug("issue converted to %s" % GCDissue)
isschk = myDB.selectone("SELECT * FROM issues WHERE Issue_Number=? AND ComicID=?", [str(GCDissue), comic['ComicID']]).fetchone()
else:
issue_int = helpers.issuedigits(arc['IssueNumber'])
logger.fdebug('int_issue = ' + str(issue_int))
logger.fdebug('int_issue = %s' % issue_int)
isschk = myDB.selectone("SELECT * FROM issues WHERE Int_IssueNumber=? AND ComicID=?", [issue_int, comic['ComicID']]).fetchone() #AND STATUS !='Snatched'", [issue_int, comic['ComicID']]).fetchone()
if isschk is None:
logger.fdebug("we matched on name, but issue " + arc['IssueNumber'] + " doesn't exist for " + comic['ComicName'])
logger.fdebug('We matched on name, but issue %s doesn\'t exist for %s' % (arc['IssueNumber'], comic['ComicName']))
else:
#this gets ugly - if the name matches and the issue, it could still be wrong series
#use series year to break it down further.
logger.fdebug('COMIC-comicyear: ' + str(int(comic['ComicYear'])))
logger.fdebug('ARC-seriesyear: ' + str(int(arc['SeriesYear'])))
if int(comic['ComicYear']) != int(arc['SeriesYear']):
logger.fdebug("Series years are different - discarding match. " + str(comic['ComicYear']) + " != " + str(arc['SeriesYear']))
logger.fdebug('COMIC-comicyear: %s' % comic['ComicYear'])
logger.fdebug('B4-ARC-seriesyear: %s' % arc['SeriesYear'])
if any([arc['SeriesYear'] is None, arc['SeriesYear'] == 'None']):
vy = '2099-00-00'
for x in isschk:
if any([x['IssueDate'] is None, x['IssueDate'] == '0000-00-00']):
sy = x['StoreDate']
if any([sy is None, sy == '0000-00-00']):
continue
else:
sy = x['IssueDate']
if sy < vy:
v_seriesyear = sy
seriesyear = v_seriesyear
logger.info('No Series year set. Discovered & set to %s' % seriesyear)
else:
logger.fdebug("issue #: %s is present!" % arc['IssueNumber'])
logger.fdebug('isschk: %s' % isschk)
logger.fdebug("Comicname: " + arc['ComicName'])
logger.fdebug("ComicID: " + str(isschk['ComicID']))
logger.fdebug("Issue: %s" % arc['IssueNumber'])
logger.fdebug("IssueArcID: " + str(arc['IssueArcID']))
seriesyear = arc['SeriesYear']
logger.fdebug('ARC-seriesyear: %s' % seriesyear)
if int(comic['ComicYear']) != int(seriesyear):
logger.fdebug('Series years are different - discarding match. %s != %s' % (comic['ComicYear'], seriesyear))
else:
logger.fdebug('issue #: %s is present!' % arc['IssueNumber'])
logger.fdebug('Comicname: %s' % arc['ComicName'])
logger.fdebug('ComicID: %s' % isschk['ComicID'])
logger.fdebug('Issue: %s' % arc['IssueNumber'])
logger.fdebug('IssueArcID: %s' % arc['IssueArcID'])
#gather the matches now.
arc_match.append({
"match_storyarc": arc['StoryArc'],
@@ -3416,17 +3457,17 @@ class WebInterface(object):
matcheroso = "yes"
break
if matcheroso == "no":
logger.fdebug("[NO WATCHLIST MATCH] Unable to find a match for " + arc['ComicName'] + " :#" + arc['IssueNumber'])
logger.fdebug('[NO WATCHLIST MATCH] Unable to find a match for %s :#%s' % (arc['ComicName'], arc['IssueNumber']))
wantedlist.append({
"ComicName": arc['ComicName'],
"IssueNumber": arc['IssueNumber'],
"IssueYear": arc['IssueYear']})
if filelist is not None and mylar.CONFIG.STORYARCDIR:
logger.fdebug("[NO WATCHLIST MATCH] Checking against lcoal Arc directory for given issue.")
logger.fdebug('[NO WATCHLIST MATCH] Checking against local Arc directory for given issue.')
fn = 0
valids = [x for x in filelist if re.sub('[\|\s]','', x['dynamic_name'].lower()).strip() == re.sub('[\|\s]','', arc['DynamicComicName'].lower()).strip()]
logger.info('valids: ' + str(valids))
logger.fdebug('valids: %s' % valids)
if len(valids) > 0:
for tmpfc in valids: #filelist:
haveissue = "no"
@@ -3435,19 +3476,19 @@ class WebInterface(object):
fcdigit = helpers.issuedigits(arc['IssueNumber'])
int_iss = helpers.issuedigits(temploc)
if int_iss == fcdigit:
logger.fdebug(arc['ComicName'] + ' Issue #' + arc['IssueNumber'] + ' already present in StoryArc directory.')
logger.fdebug('%s Issue #%s already present in StoryArc directory' % (arc['ComicName'], arc['IssueNumber']))
#update storyarcs db to reflect status.
rr_rename = False
if mylar.CONFIG.READ2FILENAME:
readorder = helpers.renamefile_readingorder(arc['ReadingOrder'])
if all([tmpfc['reading_order'] is not None, int(readorder) != int(tmpfc['reading_order']['reading_sequence'])]):
logger.warn('reading order sequence has changed for this issue from ' + str(tmpfc['reading_order']['reading_sequence']) + ' to ' + str(readorder))
logger.warn('reading order sequence has changed for this issue from %s to %s' % (tmpfc['reading_order']['reading_sequence'], readorder))
rr_rename = True
dfilename = str(readorder) + '-' + tmpfc['reading_order']['filename']
dfilename = '%s-%s' % (readorder, tmpfc['reading_order']['filename'])
elif tmpfc['reading_order'] is None:
dfilename = str(readorder) + '-' + tmpfc['comicfilename']
dfilename = '%s-%s' % (readorder, tmpfc['comicfilename'])
else:
dfilename = str(readorder) + '-' + tmpfc['reading_order']['filename']
dfilename = '%s-%s' % (readorder, tmpfc['reading_order']['filename'])
else:
dfilename = tmpfc['comicfilename']
@@ -3457,21 +3498,30 @@ class WebInterface(object):
loc_path = os.path.join(tmpfc['comiclocation'], dfilename)
if rr_rename:
logger.fdebug('Now re-sequencing file to : ' + dfilename)
logger.fdebug('Now re-sequencing file to : %s' % dfilename)
os.rename(os.path.join(tmpfc['comiclocation'],tmpfc['comicfilename']), loc_path)
newVal = {"Status": "Downloaded",
newStatus = 'Downloaded'
newVal = {"Status": newStatus,
"Location": loc_path} #dfilename}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal)
break
else:
newStatus = 'Skipped'
fn+=1
if newStatus == 'Skipped':
#this will set all None Status' to Skipped (at least initially)
newVal = {"Status": "Skipped"}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal)
continue
newVal = {"Status": "Skipped"}
ctrlVal = {"IssueArcID": arc['IssueArcID']}
myDB.upsert("storyarcs", newVal, ctrlVal)
logger.fdebug(str(len(arc_match)) + " issues currently exist on your watchlist that are within this arc. Analyzing...")
logger.fdebug('%s issues currently exist on your watchlist that are within this arc. Analyzing...' % len(arc_match))
for m_arc in arc_match:
#now we cycle through the issues looking for a match.
#issue = myDB.selectone("SELECT * FROM issues where ComicID=? and Issue_Number=?", [m_arc['match_id'], m_arc['match_issue']]).fetchone()
@@ -3479,11 +3529,9 @@ class WebInterface(object):
if issue is None: pass
else:
logger.fdebug("issue: " + issue['Issue_Number'] + "..." + m_arc['match_issue'])
# if helpers.decimal_issue(issuechk['Issue_Number']) == helpers.decimal_issue(m_arc['match_issue']):
logger.fdebug('issue: %s ... %s' % (issue['Issue_Number'], m_arc['match_issue']))
if issue['Issue_Number'] == m_arc['match_issue']:
logger.fdebug("we matched on " + issue['Issue_Number'] + " for " + m_arc['match_name'])
logger.fdebug('We matched on %s for %s' % (issue['Issue_Number'], m_arc['match_name']))
if issue['Status'] == 'Downloaded' or issue['Status'] == 'Archived' or issue['Status'] == 'Snatched':
if showonreadlist:
showctrlVal = {"IssueID": issue['IssueID']}
@ -3494,7 +3542,7 @@ class WebInterface(object):
"ComicID": m_arc['match_id']}
myDB.upsert("readlist", shownewVal, showctrlVal)
logger.fdebug("Already have " + issue['ComicName'] + " :# " + issue['Issue_Number'])
logger.fdebug('Already have %s : #%s' % (issue['ComicName'], issue['Issue_Number']))
if issue['Location'] is not None:
issloc = os.path.join(m_arc['match_filedirectory'], issue['Location'])
else:
@@ -3512,10 +3560,10 @@ class WebInterface(object):
continue
except:
pass
logger.fdebug('source location set to : ' + issloc)
logger.fdebug('source location set to : %s' % issloc)
if all([mylar.CONFIG.STORYARCDIR, mylar.CONFIG.COPY2ARCDIR]):
logger.fdebug('Destination location set to : ' + m_arc['destination_location'])
logger.fdebug('Destination location set to : %s' % m_arc['destination_location'])
logger.fdebug('Attempting to copy into StoryArc directory')
#copy into StoryArc directory...
@@ -3525,13 +3573,13 @@ class WebInterface(object):
if mylar.CONFIG.READ2FILENAME:
readorder = helpers.renamefile_readingorder(m_arc['match_readingorder'])
if all([m_arc['match_readingorder'] is not None, int(readorder) != int(m_arc['match_readingorder'])]):
logger.warn('reading order sequence has changed for this issue from ' + str(m_arc['match_reading_order']) + ' to ' + str(readorder))
logger.warn('Reading order sequence has changed for this issue from %s to %s' % (m_arc['match_reading_order'], readorder))
rr_rename = True
dfilename = str(readorder) + '-' + issue['Location']
dfilename = '%s-%s' % (readorder, issue['Location'])
elif m_arc['match_readingorder'] is None:
dfilename = str(readorder) + '-' + issue['Location']
dfilename = '%s-%s' % (readorder, issue['Location'])
else:
dfilename = str(readorder) + '-' + issue['Location']
dfilename = '%s-%s' % (readorder, issue['Location'])
else:
dfilename = issue['Location']
@@ -3542,21 +3590,21 @@ class WebInterface(object):
dstloc = os.path.join(m_arc['destination_location'], dfilename)
if rr_rename:
logger.fdebug('Now re-sequencing COPIED file to : ' + dfilename)
logger.fdebug('Now re-sequencing COPIED file to : %s' % dfilename)
os.rename(issloc, dstloc)
if not os.path.isfile(dstloc):
logger.fdebug('Copying ' + issloc + ' to ' + dstloc)
logger.fdebug('Copying %s to %s' % (issloc, dstloc))
try:
fileoperation = helpers.file_ops(issloc, dstloc, arc=True)
if not fileoperation:
raise OSError
except (OSError, IOError):
logger.error('Failed to ' + mylar.CONFIG.FILE_OPTS + ' ' + issloc + ' - check directories and manually re-run.')
logger.error('Failed to %s %s - check directories and manually re-run.' % (mylar.CONFIG.FILE_OPTS, issloc))
continue
else:
logger.fdebug('Destination file exists: ' + dstloc)
logger.fdebug('Destination file exists: %s' % dstloc)
location_path = dstloc
else:
location_path = issloc
@@ -3569,12 +3617,12 @@ class WebInterface(object):
myDB.upsert("storyarcs",newVal,ctrlVal)
else:
logger.fdebug("We don't have " + issue['ComicName'] + " :# " + issue['Issue_Number'])
logger.fdebug('We don\'t have %s : #%s' % (issue['ComicName'], issue['Issue_Number']))
ctrlVal = {"IssueArcID": m_arc['match_issuearcid']}
newVal = {"Status": issue['Status'], #"Wanted",
"IssueID": issue['IssueID']}
myDB.upsert("storyarcs", newVal, ctrlVal)
logger.info("Marked " + issue['ComicName'] + " :# " + issue['Issue_Number'] + " as " + issue['Status'])
logger.info('Marked %s :#%s as %s' % (issue['ComicName'], issue['Issue_Number'], issue['Status']))
arcstats = self.storyarc_main(StoryArcID)
logger.info('[STORY-ARCS] Completed Missing/Recheck Files for %s [%s / %s]' % (arcname, arcstats['Have'], arcstats['TotalIssues']))
@@ -3588,7 +3636,6 @@ class WebInterface(object):
def ReadGetWanted(self, StoryArcID):
# this will queue up (ie. make 'Wanted') issues in a given Story Arc that are 'Not Watched'
print StoryArcID
stupdate = []
mode = 'story_arc'
myDB = db.DBConnection()
@@ -3596,25 +3643,25 @@ class WebInterface(object):
if wantedlist is not None:
for want in wantedlist:
print want
issuechk = myDB.selectone("SELECT * FROM issues WHERE IssueID=?", [want['IssueArcID']]).fetchone()
issuechk = myDB.selectone("SELECT a.Type, a.ComicYear, b.ComicName, b.Issue_Number, b.ComicID, b.IssueID FROM comics as a INNER JOIN issues as b on a.ComicID = b.ComicID WHERE b.IssueID=?", [want['IssueArcID']]).fetchone()
SARC = want['StoryArc']
IssueArcID = want['IssueArcID']
Publisher = want['Publisher']
if issuechk is None:
# none means it's not a 'watched' series
s_comicid = want['ComicID'] #None
s_issueid = want['IssueID'] #None
s_issueid = want['IssueArcID'] #None
BookType = want['Type']
stdate = want['ReleaseDate']
issdate = want['IssueDate']
logger.fdebug("-- NOT a watched series queue.")
logger.fdebug(want['ComicName'] + " -- #" + str(want['IssueNumber']))
logger.fdebug(u"Story Arc : " + str(SARC) + " queueing the selected issue...")
logger.fdebug(u"IssueArcID : " + str(IssueArcID))
logger.fdebug(u"ComicID: " + str(s_comicid) + " --- IssueID: " + str(s_issueid)) # no comicid in issues table.
logger.fdebug(u"ReleaseDate: " + str(stdate) + " --- IssueDate: " + str(issdate))
#logger.info(u'Publisher: ' + want['Publisher']) <-- no publisher in issues table.
logger.fdebug('%s -- #%s' % (want['ComicName'], want['IssueNumber']))
logger.fdebug('Story Arc %s : queueing the selected issue...' % SARC)
logger.fdebug('IssueArcID : %s' % IssueArcID)
logger.fdebug('ComicID: %s --- IssueID: %s' % (s_comicid, s_issueid)) # no comicid in issues table.
logger.fdebug('ReleaseDate: %s --- IssueDate: %s' % (stdate, issdate))
issueyear = want['IssueYEAR']
logger.fdebug('IssueYear: ' + str(issueyear))
logger.fdebug('IssueYear: %s' % issueyear)
if issueyear is None or issueyear == 'None':
try:
logger.fdebug('issdate:' + str(issdate))
@@ -3624,31 +3671,44 @@ class WebInterface(object):
except:
issueyear = stdate[:4]
logger.fdebug('ComicYear: ' + str(want['SeriesYear']))
foundcom, prov = search.search_init(ComicName=want['ComicName'], IssueNumber=want['IssueNumber'], ComicYear=issueyear, SeriesYear=want['SeriesYear'], Publisher=Publisher, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True)
logger.fdebug('ComicYear: %s' % want['SeriesYear'])
passinfo = {'issueid': s_issueid,
'comicname': want['ComicName'],
'seriesyear': want['SeriesYear'],
'comicid': s_comicid,
'issuenumber': want['IssueNumber'],
'booktype': BookType}
#oneoff = True ?
else:
# it's a watched series
s_comicid = issuechk['ComicID']
s_issueid = issuechk['IssueID']
logger.fdebug("-- watched series queue.")
logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number']))
foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=Publisher, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID)
logger.fdebug('%s --- #%s' % (issuechk['ComicName'], issuechk['Issue_Number']))
passinfo = {'issueid': s_issueid,
'comicname': issuechk['ComicName'],
'seriesyear': issuechk['SeriesYear'],
'comicid': s_comicid,
'issuenumber': issuechk['Issue_Number'],
'booktype': issuechk['Type']}
if foundcom['status'] is True:
logger.fdebug('sucessfully found.')
#update the status - this is necessary for torrents as they are in 'snatched' status.
updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
else:
logger.fdebug('not sucessfully found.')
stupdate.append({"Status": "Wanted",
"IssueArcID": IssueArcID,
"IssueID": s_issueid})
mylar.SEARCH_QUEUE.put(passinfo)
#if foundcom['status'] is True:
# logger.fdebug('successfully found.')
# #update the status - this is necessary for torrents as they are in 'snatched' status.
# updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
#else:
# logger.fdebug('not successfully found.')
# stupdate.append({"Status": "Wanted",
# "IssueArcID": IssueArcID,
# "IssueID": s_issueid})
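# --- editor's sketch (not part of this commit): one way a consumer could
# drain the passinfo dicts queued above; the worker shape and run_search
# callable are assumptions -- only mylar.SEARCH_QUEUE.put(passinfo) is
# what the commit actually does.
#
# import Queue  # Python 2 stdlib, matching this file
#
# def search_queue_worker(shutdown_event, run_search):
#     # run_search: hypothetical callable standing in for whatever the
#     # search thread really invokes per queued item
#     while not shutdown_event.is_set():
#         try:
#             item = mylar.SEARCH_QUEUE.get(timeout=5)
#         except Queue.Empty:
#             continue
#         # each item carries issueid / comicname / seriesyear / comicid /
#         # issuenumber / booktype; booktype is what lets one-shots be
#         # searched without an issue number
#         run_search(item)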
watchlistchk = myDB.select("SELECT * FROM storyarcs WHERE StoryArcID=? AND Status='Wanted'", [StoryArcID])
if watchlistchk is not None:
for watchchk in watchlistchk:
logger.fdebug('Watchlist hit - ' + str(watchchk['ComicName']))
issuechk = myDB.selectone("SELECT * FROM issues WHERE IssueID=?", [watchchk['IssueArcID']]).fetchone()
logger.fdebug('Watchlist hit - %s' % watchchk['ComicName'])
issuechk = myDB.selectone("SELECT a.Type, a.ComicYear, b.ComicName, b.Issue_Number, b.ComicID, b.IssueID FROM comics as a INNER JOIN issues as b on a.ComicID = b.ComicID WHERE b.IssueID=?", [watchchk['IssueArcID']]).fetchone()
SARC = watchchk['StoryArc']
IssueArcID = watchchk['IssueArcID']
if issuechk is None:
@ -3659,17 +3719,17 @@ class WebInterface(object):
s_comicid = None
try:
s_issueid = watchchk['IssueID']
s_issueid = watchchk['IssueArcID']
except:
s_issueid = None
logger.fdebug("-- NOT a watched series queue.")
logger.fdebug(watchchk['ComicName'] + " -- #" + str(watchchk['IssueNumber']))
logger.fdebug(u"Story Arc : " + str(SARC) + " queueing up the selected issue...")
logger.fdebug(u"IssueArcID : " + str(IssueArcID))
logger.fdebug('%s -- #%s' % (watchchk['ComicName'], watchchk['IssueNumber']))
logger.fdebug('Story Arc : %s queueing up the selected issue...' % SARC)
logger.fdebug('IssueArcID : %s' % IssueArcID)
try:
issueyear = watchchk['IssueYEAR']
logger.fdebug('issueYEAR : ' + issueyear)
logger.fdebug('issueYEAR : %s' % issueyear)
except:
try:
issueyear = watchchk['IssueDate'][:4]
@ -3678,39 +3738,55 @@ class WebInterface(object):
stdate = watchchk['ReleaseDate']
issdate = watchchk['IssueDate']
logger.fdebug('issueyear : ' + str(issueyear))
logger.fdebug('comicname : ' + watchchk['ComicName'])
logger.fdebug('issuenumber : ' + watchchk['IssueNumber'])
logger.fdebug('comicyear : ' + watchchk['SeriesYear'])
logger.fdebug('issueyear : %s' % issueyear)
logger.fdebug('comicname : %s' % watchchk['ComicName'])
logger.fdebug('issuenumber : %s' % watchchk['IssueNumber'])
logger.fdebug('comicyear : %s' % watchchk['SeriesYear'])
#logger.info('publisher : ' + watchchk['IssuePublisher']) <-- no publisher in table
logger.fdebug('SARC : ' + SARC)
logger.fdebug('IssueArcID : ' + IssueArcID)
foundcom, prov = search.search_init(ComicName=watchchk['ComicName'], IssueNumber=watchchk['IssueNumber'], ComicYear=issueyear, SeriesYear=watchchk['SeriesYear'], Publisher=None, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True)
logger.fdebug('SARC : %s' % SARC)
logger.fdebug('IssueArcID : %s' % IssueArcID)
passinfo = {'issueid': s_issueid,
'comicname': watchchk['ComicName'],
'seriesyear': watchchk['SeriesYear'],
'comicid': s_comicid,
'issuenumber': watchchk['IssueNumber'],
'booktype': watchchk['Type']}
#foundcom, prov = search.search_init(ComicName=watchchk['ComicName'], IssueNumber=watchchk['IssueNumber'], ComicYear=issueyear, SeriesYear=watchchk['SeriesYear'], Publisher=None, IssueDate=issdate, StoreDate=stdate, IssueID=s_issueid, SARC=SARC, IssueArcID=IssueArcID, oneoff=True)
else:
# it's a watched series
s_comicid = issuechk['ComicID']
s_issueid = issuechk['IssueID']
logger.fdebug("-- watched series queue.")
logger.fdebug(issuechk['ComicName'] + " -- #" + str(issuechk['Issue_Number']))
foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
if foundcom['status'] is True:
updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
else:
logger.fdebug('Watchlist issue not sucessfully found')
logger.fdebug('issuearcid: ' + str(IssueArcID))
logger.fdebug('issueid: ' + str(s_issueid))
stupdate.append({"Status": "Wanted",
"IssueArcID": IssueArcID,
"IssueID": s_issueid})
logger.fdebug('-- watched series queue.')
logger.fdebug('%s -- #%s' % (issuechk['ComicName'], issuechk['Issue_Number']))
passinfo = {'issueid': s_issueid,
'comicname': issuechk['ComicName'],
'seriesyear': issuechk['SeriesYear'],
'comicid': s_comicid,
'issuenumber': issuechk['Issue_Number'],
'booktype': issuechk['Type']}
#foundcom, prov = search.search_init(ComicName=issuechk['ComicName'], IssueNumber=issuechk['Issue_Number'], ComicYear=issuechk['IssueYear'], SeriesYear=issuechk['SeriesYear'], Publisher=None, IssueDate=None, StoreDate=issuechk['ReleaseDate'], IssueID=issuechk['IssueID'], AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=SARC, IssueArcID=IssueArcID, mode=None, rsscheck=None, ComicID=None)
mylar.SEARCH_QUEUE.put(passinfo)
#if foundcom['status'] is True:
# updater.foundsearch(s_comicid, s_issueid, mode=mode, provider=prov, SARC=SARC, IssueArcID=IssueArcID)
#else:
# logger.fdebug('Watchlist issue not successfully found')
# logger.fdebug('issuearcid: %s' % IssueArcID)
# logger.fdebug('issueid: %s' % s_issueid)
# stupdate.append({"Status": "Wanted",
# "IssueArcID": IssueArcID,
# "IssueID": s_issueid})
if len(stupdate) > 0:
logger.fdebug(str(len(stupdate)) + ' issues need to get updated to Wanted Status')
logger.fdebug('%s issues need to get updated to Wanted Status' % len(stupdate))
for st in stupdate:
ctrlVal = {'IssueArcID': st['IssueArcID']}
newVal = {'Status': st['Status']}
if st['IssueID']:
if st['IssueID']:
logger.fdebug('issueid:' + str(st['IssueID']))
logger.fdebug('issueid: %s' % st['IssueID'])
newVal['IssueID'] = st['IssueID']
myDB.upsert("storyarcs", newVal, ctrlVal)
ReadGetWanted.exposed = True
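# --- editor's sketch (not part of this commit): the ctrlVal/newVal calls
# above follow an update-or-insert contract; mylar's db.upsert internals
# may differ, this only illustrates the observable behaviour.
#
# def upsert_sketch(cursor, table, newVal, ctrlVal):
#     sets = ', '.join('%s=?' % k for k in newVal)
#     where = ' AND '.join('%s=?' % k for k in ctrlVal)
#     cursor.execute('UPDATE %s SET %s WHERE %s' % (table, sets, where),
#                    newVal.values() + ctrlVal.values())
#     if cursor.rowcount == 0:
#         # no row matched the control keys -> insert a fresh one
#         row = dict(newVal, **ctrlVal)
#         cols = list(row)
#         cursor.execute('INSERT INTO %s (%s) VALUES (%s)'
#                        % (table, ', '.join(cols),
#                           ', '.join(['?'] * len(cols))),
#                        [row[c] for c in cols])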
@ -5672,18 +5748,23 @@ class WebInterface(object):
testslack.exposed = True
def testrtorrent(self, host, username, password, auth, verify, ssl, rpc_url):
def testrtorrent(self, host, username, password, auth, verify, rpc_url):
import torrent.clients.rtorrent as TorClient
client = TorClient.TorrentClient()
ca_bundle = None
if mylar.CONFIG.RTORRENT_CA_BUNDLE is not None:
ca_bundle = mylar.CONFIG.RTORRENT_CA_BUNDLE
if not client.connect(host, username, password, auth, verify, ssl, rpc_url, ca_bundle):
rclient = client.connect(host, username, password, auth, verify, rpc_url, ca_bundle, test=True)
if not rclient:
logger.warn('Could not establish connection to %s' % host)
return 'Error establishing connection to Rtorrent'
return '[rTorrent] Error establishing connection to rTorrent'
else:
logger.info('Successfully validated connection to %s' % host)
return "Successfully validated connection to %s" % host
if rclient['status'] is False:
logger.warn('[rTorrent] Could not establish connection to %s. Error returned: %s' % (host, rclient['error']))
return 'Error establishing connection to rTorrent'
else:
logger.info('[rTorrent] Successfully validated connection to %s [v%s]' % (host, rclient['version']))
return 'Successfully validated rTorrent connection'
testrtorrent.exposed = True
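# --- editor's note (not part of this commit): the handler above implies
# client.connect(..., test=True) returns either a falsy value (no
# connection at all) or a dict shaped roughly as below; keys inferred
# from usage only.
#
# def connect_result(ok, version=None, error=None):
#     # status False -> connected but validation failed ('error' is set)
#     # status True  -> validated ('version' is the rTorrent version)
#     return {'status': ok, 'version': version, 'error': error}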
def testqbit(self, host, username, password):
@ -5807,6 +5888,17 @@ class WebInterface(object):
test_32p.exposed = True
def check_ActiveDDL(self):
myDB = db.DBConnection()
active = myDB.selectone("SELECT * FROM DDL_INFO WHERE STATUS = 'Downloading'").fetchone()
if active is None:
return "There are no active downloads currently being attended to"
else:
filesize = os.stat(os.path.join(mylar.CONFIG.DDL_LOCATION, active['filename'])).st_size
cmath = int(float(filesize*100)/int(int(active['remote_filesize'])*100) * 100)
return "%s%s" % (cmath, '%')
check_ActiveDDL.exposed = True
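# --- editor's note (not part of this commit): the cmath expression above
# reduces to a plain percentage, since the paired *100 factors cancel:
#   int(float(filesize*100) / int(int(remote)*100) * 100)
#     == int(float(filesize) / remote * 100)
# worked example with assumed sizes:
#
# filesize, remote = 157286400, 209715200   # 150MB fetched of 200MB
# pct = int(float(filesize * 100) / int(int(remote) * 100) * 100)
# assert pct == 75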
def create_readlist(self, list=None, weeknumber=None, year=None):
# ({
# "PUBLISHER": weekly['PUBLISHER'],