diff --git a/data/interfaces/default/manageissues.html b/data/interfaces/default/manageissues.html
index 92b79777..dc933601 100755
--- a/data/interfaces/default/manageissues.html
+++ b/data/interfaces/default/manageissues.html
@@ -85,7 +85,17 @@
${issue['IssueDate']} |
${issue['ReleaseDate']} |
${issue['Status']} |
- |
+
+ <%
+ try:
+ ahash = issue['hash']
+ except:
+ ahash = None
+ %>
+ %if ahash is not None:
+ auto-snatch
+ %endif
+ |
%endfor
diff --git a/data/interfaces/default/searchresults.html b/data/interfaces/default/searchresults.html
index 5d190e88..1462dd26 100755
--- a/data/interfaces/default/searchresults.html
+++ b/data/interfaces/default/searchresults.html
@@ -48,6 +48,10 @@
%if searchresults:
%for result in searchresults:
<%
+ if result['haveit'] == 'No':
+ haveit = 'No'
+ else:
+ haveit = result['haveit']['comicid']
grade = 'Z'
rtype = None
if type != 'story_arc':
@@ -63,7 +67,7 @@
else:
grade = 'A'
- if result['haveit'] != "No":
+ if haveit != "No":
grade = 'H';
%>
@@ -92,7 +96,7 @@
Add this Comic |
%endif
%else:
- Already in library |
+ Already in library |
%endif
%endfor
diff --git a/data/interfaces/default/weeklypull.html b/data/interfaces/default/weeklypull.html
index 35539f07..5167ebdb 100755
--- a/data/interfaces/default/weeklypull.html
+++ b/data/interfaces/default/weeklypull.html
@@ -119,6 +119,9 @@
%if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded':
Retry
%endif
+ %if weekly['HASH'] is not None:
+ auto-snatch
+ %endif
%elif weekly['STATUS'] == 'Skipped':
%if weekly['COMICID'] != '' and weekly['COMICID'] is not None:
Add
@@ -129,6 +132,8 @@
Search
%endif
One-Off
+ %elif weekly['HASH'] is not None:
+ auto-snatch
%endif
%endif
diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py
index f823afb9..c5aa8c32 100755
--- a/mylar/PostProcessor.py
+++ b/mylar/PostProcessor.py
@@ -313,21 +313,13 @@ class PostProcessor(object):
logger.fdebug (module + ' Manual Run initiated')
#Manual postprocessing on a folder.
#first we get a parsed results list of the files being processed, and then poll against the sql to get a short list of hits.
- flc = filechecker.FileChecker(self.nzb_folder, justparse=True)
+ flc = filechecker.FileChecker(self.nzb_folder, justparse=True, pp_mode=True)
filelist = flc.listFiles()
if filelist['comiccount'] == 0: # is None:
logger.warn('There were no files located - check the debugging logs if you think this is in error.')
return
logger.info('I have located ' + str(filelist['comiccount']) + ' files that I should be able to post-process. Continuing...')
- #load the hashes for torrents so continual post-processing of same issues don't occur.
- pp_crclist = []
- if mylar.CONFIG.ENABLE_TORRENTS:
- pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE a.Status='Post-Processed' and a.crc is not NULL and (b.Status='Downloaded' or b.status='Archived ORDER BY b.IssueDate')")
- for pp in pp_crc:
- pp_crclist.append({'IssueID': pp['IssueID'],
- 'crc': pp['crc']})
-
#preload the entire ALT list in here.
alt_list = []
alt_db = myDB.select("SELECT * FROM Comics WHERE AlternateSearch != 'None'")
@@ -343,13 +335,6 @@ class PostProcessor(object):
manual_arclist = []
for fl in filelist['comiclist']:
- if mylar.CONFIG.ENABLE_TORRENTS:
- crcchk = None
- tcrc = helpers.crc(os.path.join(fl['comiclocation'], fl['comicfilename'].decode(mylar.SYS_ENCODING)))
- crcchk = [x for x in pp_crclist if tcrc == x['crc']]
- if crcchk:
- logger.fdebug('%s Already post-processed this item %s - Ignoring' % (module, crcchk))
- continue
as_d = filechecker.FileChecker()
as_dinfo = as_d.dynamic_replace(helpers.conversion(fl['series_name']))
diff --git a/mylar/config.py b/mylar/config.py
index 461d25bb..10543f6c 100644
--- a/mylar/config.py
+++ b/mylar/config.py
@@ -196,7 +196,7 @@ _CONFIG_DEFINITIONS = OrderedDict({
'SAB_PRIORITY': (str, 'SABnzbd', "Default"),
'SAB_TO_MYLAR': (bool, 'SABnzbd', False),
'SAB_DIRECTORY': (str, 'SABnzbd', None),
- 'SAB_CLIENT_POST_PROCESSING': (bool, 'SABnbzd', False), #0/False: ComicRN.py, #1/True: Completed Download Handling
+ 'SAB_CLIENT_POST_PROCESSING': (bool, 'SABnzbd', False), #0/False: ComicRN.py, #1/True: Completed Download Handling
'NZBGET_HOST': (str, 'NZBGet', None),
'NZBGET_PORT': (str, 'NZBGet', None),
@@ -335,6 +335,15 @@ _CONFIG_DEFINITIONS = OrderedDict({
'OPDS_PASSWORD': (str, 'OPDS', None),
'OPDS_METAINFO': (bool, 'OPDS', False),
+ 'TEST_VALUE': (bool, 'TEST', True),
+})
+
+_BAD_DEFINITIONS = OrderedDict({
+ #for those items that were in wrong sections previously, or sections that are no longer present...
+ #using this method, old values are able to be transfered to the new config items properly.
+ #keyname, section, oldkeyname
+ 'SAB_CLIENT_POST_PROCESSING': ('SABnbzd', None),
+ 'TEST_VALUE': ('TEST', 'TESTVALUE'),
})
class Config(object):
@@ -378,7 +387,27 @@ class Config(object):
x = 'None'
xv.append(x)
value = self.check_setting(xv)
-
+
+            for b, bv in _BAD_DEFINITIONS.iteritems():
+                try:
+                    if config.has_section(bv[0]) and b == k:
+                        cvs = xv
+                        if bv[1] is None:
+                            ckey = k
+                        else:
+                            ckey = bv[1]
+                        corevalues = [ckey if x == k else x for x in cvs]
+                        corevalues = [bv[0] if x == v[1] else x for x in corevalues]
+                        value = self.check_setting(corevalues)
+                        if bv[1] is None:
+                            config.remove_option(bv[0], ckey.lower())
+                            config.remove_section(bv[0])
+                        else:
+                            config.remove_option(bv[0], bv[1].lower())
+                        break
+                except:
+                    pass
+
if all([k != 'CONFIG_VERSION', k != 'MINIMAL_INI']):
try:
if v[0] == str and any([value == "", value is None, len(value) == 0, value == 'None']):
diff --git a/mylar/filechecker.py b/mylar/filechecker.py
index 0c267167..8dbfab05 100755
--- a/mylar/filechecker.py
+++ b/mylar/filechecker.py
@@ -36,7 +36,7 @@ from mylar import logger, helpers
class FileChecker(object):
- def __init__(self, dir=None, watchcomic=None, Publisher=None, AlternateSearch=None, manual=None, sarc=None, justparse=None, file=None):
+ def __init__(self, dir=None, watchcomic=None, Publisher=None, AlternateSearch=None, manual=None, sarc=None, justparse=None, file=None, pp_mode=False):
#dir = full path to the series Comic Location (manual pp will just be psssing the already parsed filename)
if dir:
self.dir = dir
@@ -90,6 +90,10 @@ class FileChecker(object):
else:
self.file = None
+ if pp_mode:
+ self.pp_mode = True
+ else:
+ self.pp_mode = False
self.failed_files = []
self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')','\u2014']
@@ -132,7 +136,7 @@ class FileChecker(object):
if filename.startswith('.'):
continue
- logger.info('[FILENAME]: ' + filename)
+ #logger.info('[FILENAME]: ' + filename)
runresults = self.parseit(self.dir, filename, filedir)
if runresults:
try:
@@ -1118,6 +1122,15 @@
             dir = dir.encode(mylar.SYS_ENCODING)
+        if all([mylar.CONFIG.ENABLE_TORRENTS is True, self.pp_mode is True]):
+            import db
+            myDB = db.DBConnection()
+            pp_crclist = []
+            pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE (a.Status='Post-Processed' or a.status='Snatched' or a.provider='32P') and a.crc is not NULL and (b.Status='Downloaded' or b.status='Archived') GROUP BY a.crc ORDER BY a.DateAdded")
+            for pp in pp_crc:
+                pp_crclist.append({'IssueID': pp['IssueID'],
+                                   'crc': pp['crc']})
+
for (dirname, subs, files) in os.walk(dir):
for fname in files:
@@ -1129,6 +1141,13 @@ class FileChecker(object):
#Ignoring MAC OS Finder directory of cached files (/.AppleDouble/)
continue
+ if all([mylar.CONFIG.ENABLE_TORRENTS is True, self.pp_mode is True]):
+ tcrc = helpers.crc(os.path.join(dirname, fname).decode(mylar.SYS_ENCODING))
+ crcchk = [x for x in pp_crclist if tcrc == x['crc']]
+ if crcchk:
+                        #logger.fdebug('Already post-processed this item %s - Ignoring' % fname)
+ continue
+
if os.path.splitext(fname)[1].lower().endswith(comic_ext):
if direc is None:
comicsize = os.path.getsize(os.path.join(dir, fname))
diff --git a/mylar/helpers.py b/mylar/helpers.py
index 3d2baefe..cebb7c58 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -2933,10 +2933,10 @@ def nzb_monitor(queue):
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
- if mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True:
+ if all([mylar.USE_SABNZBD is True, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]):
nz = sabnzbd.SABnzbd(item)
nzstat = nz.processor()
- elif mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True:
+ elif all([mylar.USE_NZBGET is True, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]):
nz = nzbget.NZBGet()
nzstat = nz.processor(item)
else:
diff --git a/mylar/nzbget.py b/mylar/nzbget.py
index 6f8f837f..865d4bba 100644
--- a/mylar/nzbget.py
+++ b/mylar/nzbget.py
@@ -116,7 +116,9 @@ class NZBGet(object):
logger.fdebug('location found @ %s' % hq[0]['DestDir'])
return {'status': True,
'name': re.sub('.nzb', '', hq[0]['NZBName']).strip(),
- 'location': hq[0]['DestDir']}
+ 'location': hq[0]['DestDir'],
+ 'failed': False}
+
else:
logger.warn('no file found where it should be @ %s - is there another script that moves things after completion ?' % hq[0]['DestDir'])
return {'status': False}
diff --git a/mylar/webserve.py b/mylar/webserve.py
index ee5c1542..c8eab63d 100644
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -1523,6 +1523,7 @@ class WebInterface(object):
watchlibrary = helpers.listLibrary()
issueLibrary = helpers.listIssues(weekinfo['weeknumber'], weekinfo['year'])
oneofflist = helpers.listoneoffs(weekinfo['weeknumber'], weekinfo['year'])
+ chklist = []
for weekly in w_results:
xfound = False
@@ -1577,6 +1578,7 @@ class WebInterface(object):
"SERIESYEAR": weekly['seriesyear'],
"HAVEIT": haveit,
"LINK": linkit,
+ "HASH": None,
"AUTOWANT": False
})
else:
@@ -1592,6 +1594,7 @@ class WebInterface(object):
"SERIESYEAR": weekly['seriesyear'],
"HAVEIT": haveit,
"LINK": linkit,
+ "HASH": None,
"AUTOWANT": True
})
else:
@@ -1606,11 +1609,15 @@ class WebInterface(object):
"SERIESYEAR": weekly['seriesyear'],
"HAVEIT": haveit,
"LINK": linkit,
+ "HASH": None,
"AUTOWANT": False
})
if tmp_status == 'Wanted':
wantedcount +=1
+ elif tmp_status == 'Snatched':
+ chklist.append(str(weekly['IssueID']))
+
weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False)
else:
@@ -1619,6 +1626,23 @@ class WebInterface(object):
if generateonly is True:
return weeklyresults, weekinfo
else:
+            endresults = []
+            if len(chklist) > 0:
+                chkthis = []
+                for genlist in helpers.chunker(chklist, 200):
+                    tmpsql = "SELECT * FROM snatched where Status='Snatched' and status != 'Post-Processed' and (provider='32P' or Provider='WWT' or Provider='DEM') AND IssueID in ({seq})".format(seq=','.join(['?'] *(len(genlist))))
+                    chunkrows = myDB.select(tmpsql, genlist)
+                    if chunkrows is not None:
+                        chkthis += chunkrows
+                for w in weeklyresults:
+                    weekit = w
+                    snatchit = [x['hash'] for x in chkthis if w['ISSUEID'] == x['IssueID']]
+                    if snatchit:
+                        logger.fdebug('[%s] Discovered previously snatched torrent not downloaded. Marking for manual auto-snatch retrieval: %s' % (w['COMIC'], ''.join(snatchit)))
+                        weekit['HASH'] = ''.join(snatchit)
+                    endresults.append(weekit)
+                weeklyresults = endresults
+
if week:
return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo)
else:
@@ -2188,6 +2212,7 @@ class WebInterface(object):
def manageIssues(self, **kwargs):
status = kwargs['status']
results = []
+ resultlist = []
myDB = db.DBConnection()
if mylar.CONFIG.ANNUALS_ON:
issues = myDB.select("SELECT * from issues WHERE Status=? AND ComicName NOT LIKE '%Annual%'", [status])
@@ -2197,8 +2222,26 @@ class WebInterface(object):
annuals = []
for iss in issues:
results.append(iss)
+ resultlist.append(str(iss['IssueID']))
for ann in annuals:
results.append(ann)
+            resultlist.append(str(ann['IssueID']))
+        endresults = []
+        if status == 'Snatched':
+            chkthis = []
+            for genlist in helpers.chunker(resultlist, 200):
+                tmpsql = "SELECT * FROM snatched where Status='Snatched' and status != 'Post-Processed' and (provider='32P' or Provider='WWT' or Provider='DEM') AND IssueID in ({seq})".format(seq=','.join(['?'] *(len(genlist))))
+                chunkrows = myDB.select(tmpsql, genlist)
+                if chunkrows is not None:
+                    chkthis += chunkrows
+            for r in results:
+                rr = dict(r)
+                snatchit = [x['hash'] for x in chkthis if r['ISSUEID'] == x['IssueID']]
+                if snatchit:
+                    logger.fdebug('[%s] Discovered previously snatched torrent not downloaded. Marking for manual auto-snatch retrieval: %s' % (r['ComicName'], ''.join(snatchit)))
+                    rr['hash'] = ''.join(snatchit)
+                endresults.append(rr)
+            results = endresults
return serve_template(templatename="manageissues.html", title="Manage " + str(status) + " Issues", issues=results, status=status)
manageIssues.exposed = True
@@ -4588,7 +4631,6 @@ class WebInterface(object):
def configUpdate(self, **kwargs):
-
checked_configs = ['enable_https', 'launch_browser', 'syno_fix', 'auto_update', 'annuals_on', 'api_enabled', 'nzb_startup_search',
'enforce_perms', 'sab_to_mylar', 'torrent_local', 'torrent_seedbox', 'rtorrent_ssl', 'rtorrent_verify', 'rtorrent_startonload',
'enable_torrents', 'qbittorrent_startonload', 'enable_rss', 'nzbsu', 'nzbsu_verify',