FIX: (#1529) Retrying downloads would fail to start a search, FIX: One-off downloads from the weekly pull list should now be working again, FIX: Test button for the 32P Auth mode option will now display the proper response

evilhero 2017-01-20 14:15:15 -05:00
parent 02fa5baf1e
commit caa5894554
4 changed files with 36 additions and 26 deletions

View File

@@ -110,7 +110,7 @@
%endif
<a href="searchit?name=${weekly['COMIC'] | u}&issue=${weekly['ISSUE']}&mode=pullseries" title="Search for this series to add to your watchlist"><span class="ui-icon ui-icon-plus"></span>Search</a>
%endif
<a href="queueissue?ComicName=${weekly['COMIC'] | u}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}" title="Just grab it"><span class="ui-icon ui-icon-plus"></span>One-Off</a>
<a href="queueissue?ComicName=${weekly['COMIC'] | u}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}" title="Just grab it"><span class="ui-icon ui-icon-plus"></span>One-Off</a>
%endif
%endif
</td>
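The only template change here is the extra pullinfo query parameter on the One-Off link, carrying weekinfo['midweek'] (which appears to be the shipdate of the pull week being viewed). CherryPy hands query-string parameters to an exposed handler as keyword arguments, so the server side of this fix is simply the new pullinfo argument on queueissue() further down in this commit. A minimal sketch of that mapping, with a stand-in handler class (the class, sample values, and return string are illustrative, not Mylar's actual code):

    import cherrypy

    class PullListSketch(object):
        @cherrypy.expose
        def queueissue(self, mode, ComicName=None, ComicIssue=None,
                       Publisher=None, pullinfo=None):
            # pullinfo arrives as a plain string taken from weekinfo['midweek'],
            # assumed here to be a YYYY-MM-DD shipdate such as '2017-01-18'
            return 'queued %s %s for week of %s' % (ComicName, ComicIssue, pullinfo)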

View File

@@ -24,12 +24,14 @@ class info32p(object):
'Accept-Charset': 'utf-8',
'User-Agent': 'Mozilla/5.0'}
if test:
if test is True:
self.test = True
else:
self.test = False
self.error = None
self.method = None
lses = self.LoginSession(mylar.USERNAME_32P, mylar.PASSWORD_32P)
if not lses.login():
@@ -42,18 +44,20 @@ class info32p(object):
else:
return self.method
else:
logger.info(self.module + '[LOGIN SUCCESS] Now preparing for the use of 32P keyed authentication...')
logger.fdebug(self.module + '[LOGIN SUCCESS] Now preparing for the use of 32P keyed authentication...')
self.authkey = lses.authkey
self.passkey = lses.passkey
self.uid = lses.uid
self.reauthenticate = reauthenticate
self.searchterm = searchterm
self.test = test
self.publisher_list = {'Entertainment', 'Press', 'Comics', 'Publishing', 'Comix', 'Studios!'}
def authenticate(self):
if self.test:
return True
feedinfo = []
try:
@@ -321,7 +325,7 @@ class info32p(object):
time.sleep(1) #just to make sure we don't hammer, 1s pause.
try:
d = s.post(url, params=payload, verify=True, allow_redirects=True)
logger.debug(self.module + ' Reply from AJAX: \n %s', d.text)
#logger.debug(self.module + ' Reply from AJAX: \n %s', d.text)
except Exception as e:
logger.info(self.module + ' Could not POST URL %s', url)
@@ -334,7 +338,7 @@ class info32p(object):
logger.debug(self.module + ' Search Result did not return valid JSON, falling back on text: %s', searchResults.text)
return False
logger.debug(self.module + " Search Result: %s", searchResults)
#logger.debug(self.module + " Search Result: %s", searchResults)
if searchResults['status'] == 'success' and searchResults['count'] > 0:
logger.info('successfully retrieved ' + str(searchResults['count']) + ' search results.')
@@ -578,11 +582,11 @@ class info32p(object):
if (self.test_login()):
logger.fdebug(self.module + ' Credential-based login was good.')
self.method = 'Session Cookie retrieved OK.'
self.method = 'Credential-based login OK.'
return True
logger.warn(self.module + ' Both session key and credential-based logins failed.')
self.method = 'Failed to retrieve Session Cookie.'
self.method = 'Both session key & credential login failed.'
return False

View File

@@ -295,7 +295,7 @@ def dbUpdate(ComicIDList=None, calledfrom=None):
def latest_update(ComicID, LatestIssue, LatestDate):
# here we add to comics.latest
#logger.info(str(ComicID) + ' - updating latest_date to : ' + str(LatestDate))
logger.fdebug(str(ComicID) + ' - updating latest_date to : ' + str(LatestDate))
myDB = db.DBConnection()
latestCTRLValueDict = {"ComicID": ComicID}
newlatestDict = {"LatestIssue": str(LatestIssue),
@@ -612,12 +612,13 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, pro
if IssueID is None or IssueID == 'None':
#if IssueID is None, it's a one-off download from the pull-list.
#give it a generic ID above the last one so it doesn't throw an error later.
if mylar.HIGHCOUNT == 0:
if any([mylar.HIGHCOUNT == 0, mylar.HIGHCOUNT is None]):
mylar.HIGHCOUNT = 900000
IssueID = mylar.HIGHCOUNT
mylar.config_write()
else:
IssueID = int(mylar.HIGHCOUNT) + 1
mylar.HIGHCOUNT+=1
IssueID = mylar.HIGHCOUNT
mylar.config_write()
controlValue = {"IssueID": IssueID,
"Provider": prov}

View File

@@ -757,7 +757,7 @@ class WebInterface(object):
logger.error('mode is unsupported: ' + chk[0]['mode'])
yield chk[0]['self.log']
break
return
post_process.exposed = True
def pauseSeries(self, ComicID):
@@ -1277,7 +1277,7 @@ class WebInterface(object):
logger.error(str(newznab_info[0]) + ' is not enabled - unable to process retry request until provider is re-enabled.')
continue
sendit = search.searcher(Provider, nzbname, comicinfo, link=link, IssueID=IssueID, ComicID=ComicID, tmpprov=fullprov, directsend=True, newznab=newznabinfo)
sendit = search.searcher(fullprov, nzbname, comicinfo, link=link, IssueID=IssueID, ComicID=ComicID, tmpprov=fullprov, directsend=True, newznab=newznabinfo)
break
return
retryissue.exposed = True
@@ -1286,7 +1286,7 @@ class WebInterface(object):
threading.Thread(target=self.queueissue, kwargs=kwargs).start()
queueit.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None):
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None):
logger.fdebug('ComicID:' + str(ComicID))
logger.fdebug('mode:' + str(mode))
now = datetime.datetime.now()
@@ -1347,12 +1347,14 @@ class WebInterface(object):
#this is for marking individual comics from the pullist to be downloaded.
#because ComicID and IssueID will both be None due to pullist, it's probably
#better to set both to some generic #, and then filter out later...
cyear = myDB.selectone("SELECT SHIPDATE FROM weekly").fetchone()
ComicYear = str(cyear['SHIPDATE'])[:4]
IssueDate = pullinfo
try:
ComicYear = str(pullinfo)[:4]
except:
ComicYear = now.year
if Publisher == 'COMICS': Publisher = None
if ComicYear == '': ComicYear = now.year
logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=cyear['SHIPDATE'], StoreDate=cyear['SHIPDATE'], IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, allow_packs=False)
foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=cyear['SHIPDATE'], IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, allow_packs=False)
if foundcom == "yes":
logger.info(u"Downloaded " + ComicName + " " + ComicIssue)
raise cherrypy.HTTPRedirect("pullist")
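With the template now supplying pullinfo, ComicYear for a one-off search is sliced from that value rather than derived from the weekly table's SHIPDATE, with the current year as a fallback. A small, hedged sketch of that derivation (this is not the commit's exact code; it assumes pullinfo is a YYYY-MM-DD style shipdate string and tightens the fallback slightly):

    import datetime

    def year_from_pullinfo(pullinfo):
        # first four characters of a 'YYYY-MM-DD' shipdate give the year;
        # fall back to the current year if pullinfo is missing or malformed
        try:
            year = str(pullinfo)[:4]
            if not year.isdigit():
                raise ValueError(pullinfo)
        except Exception:
            year = str(datetime.datetime.now().year)
        return year

    # year_from_pullinfo('2017-01-18') -> '2017'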
@@ -3432,8 +3434,8 @@ class WebInterface(object):
importResults.exposed = True
def ImportFilelisting(self, comicname, dynamicname, volume):
comicname = urllib.unquote_plus(helpers.econversion(comicname))
dynamicname = helpers.econversion(urllib.unquote_plus(dynamicname)) #urllib.unquote(dynamicname).decode('utf-8')
comicname = urllib.unquote_plus(helpers.conversion(comicname))
dynamicname = helpers.conversion(urllib.unquote_plus(dynamicname)) #urllib.unquote(dynamicname).decode('utf-8')
myDB = db.DBConnection()
if volume is None or volume == 'None':
results = myDB.select("SELECT * FROM importresults WHERE (WatchMatch is Null OR WatchMatch LIKE 'C%') AND DynamicName=? AND Volume IS NULL",[dynamicname])
@@ -5137,8 +5139,11 @@ class WebInterface(object):
def test_32p(self):
import auth32p
p = auth32p.info32p(test=True)
rtnvalues = p.authenticate()
logger.info('32p return values: ' + str(rtnvalues))
return rtnvalues
tmp = auth32p.info32p(test=True)
rtnvalues = tmp.authenticate()
if rtnvalues is True:
return "Successfully Authenticated."
else:
return "Could not Authenticate."
test_32p.exposed = True
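Taken together, the auth32p and webserve changes give the settings-page Test button a clean round trip: info32p(test=True) records the test flag while the login is attempted, authenticate() short-circuits in test mode, and test_32p() turns the boolean into the message displayed to the user. A trimmed, illustrative sketch of that flow (the stub class and function names are made up; the real info32p also handles session keys, feedinfo and keyed authentication):

    class Auth32pSketch(object):
        # stand-in for auth32p.info32p, reduced to the test-mode behaviour
        def __init__(self, test=False):
            self.test = True if test is True else False

        def authenticate(self):
            if self.test:
                # test mode stops here so the Test button gets a quick answer
                return True
            # the real method would carry on to keyed authentication / feedinfo
            return False

    def test_32p_sketch():
        # mirrors the reworked webserve.test_32p response handling
        rtnvalues = Auth32pSketch(test=True).authenticate()
        if rtnvalues is True:
            return "Successfully Authenticated."
        else:
            return "Could not Authenticate."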