diff --git a/data/interfaces/default/storyarc_detail.html b/data/interfaces/default/storyarc_detail.html
index 734948c4..2f40e831 100755
--- a/data/interfaces/default/storyarc_detail.html
+++ b/data/interfaces/default/storyarc_detail.html
@@ -41,9 +41,11 @@
Arcs in StoryArc Directory: <% sdir = os.path.join(mylar.DESTINATION_DIR, "StoryArcs") %>${sdir}
+
+
diff --git a/data/interfaces/default/weeklypull.html b/data/interfaces/default/weeklypull.html
index a4022f33..324809ca 100755
--- a/data/interfaces/default/weeklypull.html
+++ b/data/interfaces/default/weeklypull.html
@@ -101,15 +101,14 @@
${weekly['STATUS']}
%if weekly['STATUS'] == 'Skipped':
%if weekly['COMICID'] != '' and weekly['COMICID'] is not None:
- add series
+ Add
%else:
%if weekly['ISSUE'] == '1' or weekly['ISSUE'] == '0':
- Watch
- %else:
- add series
+ Watch
%endif
+ Search
%endif
- one off
+ One-Off
%endif
%endif
|
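
Note: the template change above only rewords the action links shown for Skipped pull-list rows (the surrounding anchor markup is unchanged in the actual file). A rough Python sketch of the branching, where `weekly` mirrors the template's row dict and the returned labels are illustrative only:

```python
def action_labels(weekly):
    """Sketch of the Skipped-row logic in weeklypull.html (labels only)."""
    labels = []
    if weekly['STATUS'] == 'Skipped':
        if weekly['COMICID'] not in ('', None):
            labels.append('Add')          # series already has a ComicVine ID
        else:
            if weekly['ISSUE'] in ('1', '0'):
                labels.append('Watch')    # new #0/#1 issues can be watched
            labels.append('Search')
        labels.append('One-Off')
    return labels
```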
diff --git a/mylar/auth32p.py b/mylar/auth32p.py
index 644c6d84..8d4f8352 100644
--- a/mylar/auth32p.py
+++ b/mylar/auth32p.py
@@ -75,7 +75,7 @@ class info32p(object):
#need a way to find response code (200=OK), but returns 200 for everything even failed signons (returns a blank page)
#logger.info('[32P] response: ' + str(r.content))
- soup = BeautifulSoup(r.content)
+ soup = BeautifulSoup(r.content, "html.parser")
soup.prettify()
if self.searchterm:
@@ -189,7 +189,7 @@ class info32p(object):
s.cookies = cj
time.sleep(1) #just to make sure we don't hammer, 1s pause.
t = s.get(url, params=params, verify=True)
- soup = BeautifulSoup(t.content)
+ soup = BeautifulSoup(t.content, "html.parser")
results = soup.find_all("a", {"class":"object-qtip"},{"data-type":"torrentgroup"})
data = []
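
Note: naming `"html.parser"` pins BeautifulSoup to the stdlib backend instead of letting bs4 guess (and warn) based on whichever parser happens to be installed, so results are consistent across installs. A standalone sketch of the same call; the URL is a placeholder, not part of Mylar's code:

```python
import requests
from bs4 import BeautifulSoup

# Placeholder request; in auth32p.py the response comes from the 32P session.
r = requests.get("https://example.com/torrents.php", verify=True)

# Explicit parser: without the second argument bs4 picks lxml/html5lib/html.parser
# depending on what is available and emits a warning.
soup = BeautifulSoup(r.content, "html.parser")
links = soup.find_all("a", {"class": "object-qtip"})
```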
diff --git a/mylar/updater.py b/mylar/updater.py
index 215aa4d2..59ff46a5 100755
--- a/mylar/updater.py
+++ b/mylar/updater.py
@@ -565,7 +565,10 @@ def weekly_update(ComicName, IssueNumber, CStatus, CID, weeknumber, year, altiss
def newpullcheck(ComicName, ComicID, issue=None):
# When adding a new comic, let's check for new issues on this week's pullist and update.
- mylar.weeklypull.pullitcheck(comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
+ if mylar.ALT_PULL != '2':
+ mylar.weeklypull.pullitcheck(comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
+ else:
+ mylar.weeklypull.new_pullcheck(weeknumber=mylar.CURRENT_WEEKNUMBER, pullyear=mylar.CURRENT_YEAR, comic1off_name=ComicName, comic1off_id=ComicID, issue=issue)
return
def no_searchresults(ComicID):
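
Note: the new branch routes one-off checks through `new_pullcheck()` when the alternate pull-list source is enabled (`ALT_PULL == '2'`, stored as a string in the config) and keeps the legacy `pullitcheck()` path otherwise. A minimal sketch of the dispatch, with stand-in functions replacing the real `mylar.weeklypull` calls:

```python
def legacy_check(**kwargs):
    # stands in for mylar.weeklypull.pullitcheck
    print("pullitcheck-style path", kwargs)

def alt_check(**kwargs):
    # stands in for mylar.weeklypull.new_pullcheck
    print("new_pullcheck-style path", kwargs)

def newpullcheck(alt_pull, comic_name, comic_id, issue=None,
                 weeknumber=None, pullyear=None):
    if alt_pull != '2':
        legacy_check(comic1off_name=comic_name, comic1off_id=comic_id, issue=issue)
    else:
        alt_check(weeknumber=weeknumber, pullyear=pullyear,
                  comic1off_name=comic_name, comic1off_id=comic_id, issue=issue)
```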
diff --git a/mylar/webserve.py b/mylar/webserve.py
index fbbbbae4..e039cb06 100755
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -2182,44 +2182,46 @@ class WebInterface(object):
def markComics(self, action=None, **args):
myDB = db.DBConnection()
comicsToAdd = []
- logger.info(args)
- for ComicID in args:
- if ComicID == 'manage_comic_length':
+ for k,v in args.items():
+ if k == 'manage_comic_length':
continue
- else:
- for k,v in args.items():
- if k == 'manage_comic_length':
- break
- #k = Comicname[ComicYear]
- #v = ComicID
- comyr = k.find('[')
- ComicYear = re.sub('[\[\]]', '', k[comyr:]).strip()
- ComicName = k[:comyr].strip()
- ComicID = v
- #cid = ComicName.decode('utf-8', 'replace')
+ #k = Comicname[ComicYear]
+ #v = ComicID
+ comyr = k.find('[')
+ ComicYear = re.sub('[\[\]]', '', k[comyr:]).strip()
+ ComicName = k[:comyr].strip()
+ ComicID = v
+ #cid = ComicName.decode('utf-8', 'replace')
- if action == 'delete':
- logger.info('[MANAGE COMICS][DELETION] Now deleting ' + ComicName + ' (' + str(ComicYear) + ') [' + str(ComicID) + '] form the DB.')
- myDB.action('DELETE from comics WHERE ComicID=?', [ComicID])
- myDB.action('DELETE from issues WHERE ComicID=?', [ComicID])
- logger.info('[MANAGE COMICS][DELETION] Successfully deleted ' + ComicName + '(' + str(ComicYear) + ')')
- elif action == 'pause':
- controlValueDict = {'ComicID': ComicID}
- newValueDict = {'Status': 'Paused'}
- myDB.upsert("comics", newValueDict, controlValueDict)
- logger.info('[MANAGE COMICS][PAUSE] ' + ComicName + ' has now been put into a Paused State.')
- elif action == 'resume':
- controlValueDict = {'ComicID': ComicID}
- newValueDict = {'Status': 'Active'}
- myDB.upsert("comics", newValueDict, controlValueDict)
- logger.info('[MANAGE COMICS][RESUME] ' + ComicName + ' has now been put into a Resumed State.')
- else:
- logger.info('appending ' + str(ComicID) + ' to refresh list.')
- comicsToAdd.append(ComicID)
+ if action == 'delete':
+ logger.info('[MANAGE COMICS][DELETION] Now deleting ' + ComicName + ' (' + str(ComicYear) + ') [' + str(ComicID) + '] from the DB.')
+ myDB.action('DELETE from comics WHERE ComicID=?', [ComicID])
+ myDB.action('DELETE from issues WHERE ComicID=?', [ComicID])
+ logger.info('[MANAGE COMICS][DELETION] Successfully deleted ' + ComicName + ' (' + str(ComicYear) + ')')
+ elif action == 'pause':
+ controlValueDict = {'ComicID': ComicID}
+ newValueDict = {'Status': 'Paused'}
+ myDB.upsert("comics", newValueDict, controlValueDict)
+ logger.info('[MANAGE COMICS][PAUSE] ' + ComicName + ' has now been put into a Paused State.')
+ elif action == 'resume':
+ controlValueDict = {'ComicID': ComicID}
+ newValueDict = {'Status': 'Active'}
+ myDB.upsert("comics", newValueDict, controlValueDict)
+ logger.info('[MANAGE COMICS][RESUME] ' + ComicName + ' has now been put into a Resumed State.')
+ elif action == 'recheck':
+ comicsToAdd.append({'ComicID': ComicID,
+ 'ComicName': ComicName,
+ 'ComicYear': ComicYear})
+ else:
+ comicsToAdd.append(ComicID)
if len(comicsToAdd) > 0:
- logger.info('[MANAGE COMICS][REFRESH] Refreshing ' + str(len(comicsToAdd)) + ' series')
- threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start()
+ if action == 'recheck':
+ logger.info('[MANAGE COMICS][RECHECK-FILES] Rechecking Files for ' + str(len(comicsToAdd)) + ' series')
+ threading.Thread(target=self.forceRescan, args=[comicsToAdd,True]).start()
+ else:
+ logger.info('[MANAGE COMICS][REFRESH] Refreshing ' + str(len(comicsToAdd)) + ' series')
+ threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start()
markComics.exposed = True
def forceUpdate(self):
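
Note: the rewritten `markComics()` loop walks the posted form fields directly, dropping the old nested loop over `args`; each key has the shape `ComicName [Year]` and each value is the ComicID, so name and year are recovered by splitting at the bracket. A hedged sketch of just that key parsing (the sample data is made up):

```python
import re

# Illustrative only: args mirrors the form data markComics() receives.
args = {'Invincible [2003]': '2127', 'manage_comic_length': '25'}

for k, v in args.items():
    if k == 'manage_comic_length':   # datatable page-size field, not a comic
        continue
    comyr = k.find('[')
    ComicYear = re.sub(r'[\[\]]', '', k[comyr:]).strip()
    ComicName = k[:comyr].strip()
    ComicID = v
    print(ComicName, ComicYear, ComicID)   # Invincible 2003 2127
```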
@@ -2234,9 +2236,16 @@ class WebInterface(object):
raise cherrypy.HTTPRedirect("home")
forceSearch.exposed = True
- def forceRescan(self, ComicID):
- threading.Thread(target=updater.forceRescan, args=[ComicID]).start()
- #raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
+ def forceRescan(self, ComicID, bulk=False):
+ if bulk:
+ cnt = 1
+ for cid in ComicID:
+ logger.info('[MASS BATCH][RECHECK-FILES][' + str(cnt) + '/' + str(len(ComicID)) + '] Rechecking ' + cid['ComicName'] + ' (' + str(cid['ComicYear']) + ')')
+ updater.forceRescan(cid['ComicID'])
+ cnt+=1
+ logger.info('[MASS BATCH][RECHECK-FILES] I have completed rechecking files for ' + str(len(ComicID)) + ' series.')
+ else:
+ threading.Thread(target=updater.forceRescan, args=[ComicID]).start()
forceRescan.exposed = True
def checkGithub(self):
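
Note: `forceRescan()` now accepts either a single ComicID or, with `bulk=True`, the list of dicts built by `markComics()`; the bulk path loops synchronously (the whole batch is already launched in its own thread by the caller), while the single-ID path keeps the per-series background thread. A rough standalone sketch of that shape, with a stub in place of `updater.forceRescan`:

```python
import threading

def rescan_one(comicid):
    # stands in for updater.forceRescan(comicid)
    print('rescanning', comicid)

def force_rescan(comic, bulk=False):
    if bulk:
        # comic is a list of {'ComicID', 'ComicName', 'ComicYear'} dicts
        for cnt, cid in enumerate(comic, start=1):
            print('[MASS BATCH][RECHECK-FILES][%d/%d] %s (%s)'
                  % (cnt, len(comic), cid['ComicName'], cid['ComicYear']))
            rescan_one(cid['ComicID'])
    else:
        threading.Thread(target=rescan_one, args=[comic]).start()
```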
diff --git a/mylar/weeklypull.py b/mylar/weeklypull.py
index 1081c744..87d4b448 100755
--- a/mylar/weeklypull.py
+++ b/mylar/weeklypull.py
@@ -831,7 +831,11 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
myDB = db.DBConnection()
watchlist = []
weeklylist = []
- comiclist = myDB.select("SELECT * FROM comics WHERE Status='Active'")
+ if comic1off_name:
+ comiclist = myDB.select("SELECT * FROM comics WHERE Status='Active' AND ComicID=?",[comic1off_id])
+ else:
+ comiclist = myDB.select("SELECT * FROM comics WHERE Status='Active'")
+
if comiclist is None:
pass
else:
@@ -926,7 +930,9 @@ def new_pullcheck(weeknumber, pullyear, comic1off_name=None, comic1off_id=None,
ki = []
kc = []
otot = 0
- logger.fdebug("[WALKSOFTLY] You are watching for: " + str(len(weeklylist)) + " comics")
+ if not comic1off_id:
+ logger.fdebug("[WALKSOFTLY] You are watching for: " + str(len(weeklylist)) + " comics")
+
weekly = myDB.select('SELECT a.comicid, IFNULL(a.Comic,IFNULL(b.ComicName, c.ComicName)) as ComicName, a.rowid, a.issue, a.issueid, c.ComicPublisher, a.weeknumber, a.shipdate, a.dynamicname FROM weekly as a INNER JOIN annuals as b INNER JOIN comics as c ON b.releasecomicid = a.comicid OR c.comicid = a.comicid OR c.DynamicComicName = a.dynamicname WHERE weeknumber = ? GROUP BY a.dynamicname', [weeknumber]) #comics INNER JOIN weekly ON comics.DynamicComicName = weekly.dynamicname OR comics.comicid = weekly.comicid INNER JOIN annuals ON annuals.comicid = weekly.comicid WHERE weeknumber = ? GROUP BY weekly.dynamicname', [weeknumber])
for week in weekly:
idmatch = None
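
Note: when a one-off series is being checked (`comic1off_name` set), the watchlist query is now narrowed to that single ComicID instead of loading every Active series, and the "You are watching for" debug line is suppressed since the count would be misleading. A simplified, parametrized sqlite3 sketch of the same query pattern (schema cut down to the columns the example needs; sample IDs are made up):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE comics (ComicID TEXT, ComicName TEXT, Status TEXT)')
conn.execute("INSERT INTO comics VALUES ('2127', 'Invincible', 'Active')")
conn.execute("INSERT INTO comics VALUES ('61861', 'Saga', 'Active')")

comic1off_id = '2127'   # would be None on a normal weekly run

if comic1off_id:
    rows = conn.execute('SELECT * FROM comics WHERE Status=? AND ComicID=?',
                        ('Active', comic1off_id)).fetchall()
else:
    rows = conn.execute("SELECT * FROM comics WHERE Status='Active'").fetchall()
print(rows)
```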