mirror of https://github.com/evilhero/mylar
FIX: NZBGet auto-complete would fail silently even after sending to client, FIX: If both SAB & NZBGET completed download handling were enabled, would incorrectly queue against the wrong item, FIX: Config entry was located under an incorrectly named section, IMP: Config module can now move values across renamed values/sections and clean up erroneously named ones via dict, IMP: Improved manual PP-speed when combined with torrents as will now just scan the directory for previous completion vs scanning and parsing, IMP: Auto-snatch can now snatch previously Snatched torrents manually via Manage Issues tab or Weekly tab, FIX: Already in Library option in search results would use the incorrect value for the localized link
This commit is contained in:
parent
25aa51fc90
commit
a5db9d61a8
|
@ -85,7 +85,17 @@
|
|||
<td id="pubdate">${issue['IssueDate']}</td>
|
||||
<td id="releasedate">${issue['ReleaseDate']}</td>
|
||||
<td id="status">${issue['Status']}</td>
|
||||
<td id="options"></td>
|
||||
<td id="options">
|
||||
<%
|
||||
try:
|
||||
ahash = issue['hash']
|
||||
except:
|
||||
ahash = None
|
||||
%>
|
||||
%if ahash is not None:
|
||||
<a href="#" onclick="doAjaxCall('torrentit?torrent_hash=${ahash}&download=True',$(this),'table')" title="Auto-Snatch torrent file"><span class="ui-icon ui-icon-plus"></span>auto-snatch</a>
|
||||
%endif
|
||||
</td>
|
||||
</tr>
|
||||
%endfor
|
||||
</tbody>
|
||||
|
|
|
@ -48,6 +48,10 @@
|
|||
%if searchresults:
|
||||
%for result in searchresults:
|
||||
<%
|
||||
if result['haveit'] == 'No':
|
||||
haveit = 'No'
|
||||
else:
|
||||
haveit = result['haveit']['comicid']
|
||||
grade = 'Z'
|
||||
rtype = None
|
||||
if type != 'story_arc':
|
||||
|
@ -63,7 +67,7 @@
|
|||
else:
|
||||
grade = 'A'
|
||||
|
||||
if result['haveit'] != "No":
|
||||
if haveit != "No":
|
||||
grade = 'H';
|
||||
%>
|
||||
<tr class="grade${grade}">
|
||||
|
@ -92,7 +96,7 @@
|
|||
<td class="add" nowrap="nowrap"><a href="addComic?comicid=${result['comicid']}&comicname=${result['name'] |u}&comicyear=${result['comicyear']}&comicpublisher=${result['publisher'] |u}&comicimage=${result['comicimage']}&comicissues=${result['issues']}&imported=${imported}&ogcname=${ogcname}&serinfo=${serinfo}"><span class="ui-icon ui-icon-plus"></span> Add this Comic</a></td>
|
||||
%endif
|
||||
%else:
|
||||
<td class="add" nowrap="nowrap"><a href="comicDetails?ComicID=${result['haveit']}"><span class="ui-icon ui-icon-arrowreturnthick-1-n"></span> Already in library</a></td>
|
||||
<td class="add" nowrap="nowrap"><a href="comicDetails?ComicID=${haveit}"><span class="ui-icon ui-icon-arrowreturnthick-1-n"></span> Already in library</a></td>
|
||||
%endif
|
||||
</tr>
|
||||
%endfor
|
||||
|
|
|
@ -119,6 +119,9 @@
|
|||
%if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded':
|
||||
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off"><span class="ui-icon ui-icon-plus"></span>Retry</a>
|
||||
%endif
|
||||
%if weekly['HASH'] is not None:
|
||||
<a href="#" onclick="doAjaxCall('torrentit?torrent_hash=${weekly['HASH']}&download=True',$(this),'table')" title="Auto-Snatch torrent file"><span class="ui-icon ui-icon-plus"></span>auto-snatch</a>
|
||||
%endif
|
||||
%elif weekly['STATUS'] == 'Skipped':
|
||||
%if weekly['COMICID'] != '' and weekly['COMICID'] is not None:
|
||||
<a href="#" title="auto-add by ID available for this series" onclick="doAjaxCall('addbyid?comicid=${weekly['COMICID']}&calledby=True',$(this),'table')" data-success="${weekly['COMIC']} is now being added to your wachlist."><span class="ui-icon ui-icon-plus"></span>Add</a>
|
||||
|
@ -129,6 +132,8 @@
|
|||
<a href="searchit?name=${weekly['COMIC'] | u}&issue=${weekly['ISSUE']}&mode=pullseries" title="Search for this series to add to your watchlist"><span class="ui-icon ui-icon-plus"></span>Search</a>
|
||||
%endif
|
||||
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Just grab it"><span class="ui-icon ui-icon-plus"></span>One-Off</a>
|
||||
%elif weekly['HASH'] is not None:
|
||||
<a href="#" onclick="doAjaxCall('torrentit?torrent_hash=${weekly['HASH']}&download=True',$(this),'table')" title="Auto-Snatch torrent file"><span class="ui-icon ui-icon-plus"></span>auto-snatch</a>
|
||||
%endif
|
||||
%endif
|
||||
</td>
|
||||
|
|
|
@ -313,21 +313,13 @@ class PostProcessor(object):
|
|||
logger.fdebug (module + ' Manual Run initiated')
|
||||
#Manual postprocessing on a folder.
|
||||
#first we get a parsed results list of the files being processed, and then poll against the sql to get a short list of hits.
|
||||
flc = filechecker.FileChecker(self.nzb_folder, justparse=True)
|
||||
flc = filechecker.FileChecker(self.nzb_folder, justparse=True, pp_mode=True)
|
||||
filelist = flc.listFiles()
|
||||
if filelist['comiccount'] == 0: # is None:
|
||||
logger.warn('There were no files located - check the debugging logs if you think this is in error.')
|
||||
return
|
||||
logger.info('I have located ' + str(filelist['comiccount']) + ' files that I should be able to post-process. Continuing...')
|
||||
|
||||
#load the hashes for torrents so continual post-processing of same issues don't occur.
|
||||
pp_crclist = []
|
||||
if mylar.CONFIG.ENABLE_TORRENTS:
|
||||
pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE a.Status='Post-Processed' and a.crc is not NULL and (b.Status='Downloaded' or b.status='Archived ORDER BY b.IssueDate')")
|
||||
for pp in pp_crc:
|
||||
pp_crclist.append({'IssueID': pp['IssueID'],
|
||||
'crc': pp['crc']})
|
||||
|
||||
#preload the entire ALT list in here.
|
||||
alt_list = []
|
||||
alt_db = myDB.select("SELECT * FROM Comics WHERE AlternateSearch != 'None'")
|
||||
|
@ -343,13 +335,6 @@ class PostProcessor(object):
|
|||
manual_arclist = []
|
||||
|
||||
for fl in filelist['comiclist']:
|
||||
if mylar.CONFIG.ENABLE_TORRENTS:
|
||||
crcchk = None
|
||||
tcrc = helpers.crc(os.path.join(fl['comiclocation'], fl['comicfilename'].decode(mylar.SYS_ENCODING)))
|
||||
crcchk = [x for x in pp_crclist if tcrc == x['crc']]
|
||||
if crcchk:
|
||||
logger.fdebug('%s Already post-processed this item %s - Ignoring' % (module, crcchk))
|
||||
continue
|
||||
|
||||
as_d = filechecker.FileChecker()
|
||||
as_dinfo = as_d.dynamic_replace(helpers.conversion(fl['series_name']))
|
||||
|
|
|
@ -196,7 +196,7 @@ _CONFIG_DEFINITIONS = OrderedDict({
|
|||
'SAB_PRIORITY': (str, 'SABnzbd', "Default"),
|
||||
'SAB_TO_MYLAR': (bool, 'SABnzbd', False),
|
||||
'SAB_DIRECTORY': (str, 'SABnzbd', None),
|
||||
'SAB_CLIENT_POST_PROCESSING': (bool, 'SABnbzd', False), #0/False: ComicRN.py, #1/True: Completed Download Handling
|
||||
'SAB_CLIENT_POST_PROCESSING': (bool, 'SABnzbd', False), #0/False: ComicRN.py, #1/True: Completed Download Handling
|
||||
|
||||
'NZBGET_HOST': (str, 'NZBGet', None),
|
||||
'NZBGET_PORT': (str, 'NZBGet', None),
|
||||
|
@ -335,6 +335,15 @@ _CONFIG_DEFINITIONS = OrderedDict({
|
|||
'OPDS_PASSWORD': (str, 'OPDS', None),
|
||||
'OPDS_METAINFO': (bool, 'OPDS', False),
|
||||
|
||||
'TEST_VALUE': (bool, 'TEST', True),
|
||||
})
|
||||
|
||||
_BAD_DEFINITIONS = OrderedDict({
|
||||
#for those items that were in wrong sections previously, or sections that are no longer present...
|
||||
#using this method, old values are able to be transferred to the new config items properly.
|
||||
#keyname, section, oldkeyname
|
||||
'SAB_CLIENT_POST_PROCESSING': ('SABnbzd', None),
|
||||
'TEST_VALUE': ('TEST', 'TESTVALUE'),
|
||||
})
|
||||
|
||||
class Config(object):
|
||||
|
@ -378,7 +387,27 @@ class Config(object):
|
|||
x = 'None'
|
||||
xv.append(x)
|
||||
value = self.check_setting(xv)
|
||||
|
||||
|
||||
for b, bv in _BAD_DEFINITIONS.iteritems():
|
||||
try:
|
||||
if config.has_section(bv[0]) and any([b == k, bv[1] is None]):
|
||||
cvs = xv
|
||||
if bv[1] is None:
|
||||
ckey = k
|
||||
else:
|
||||
ckey = bv[1]
|
||||
corevalues = [ckey if x == 0 else x for x in cvs]
|
||||
corevalues = [bv[0] if x == corevalues.index(bv[0]) else x for x in cvs]
|
||||
value = self.check_setting(corevalues)
|
||||
if bv[1] is None:
|
||||
config.remove_option(bv[0], ckey.lower())
|
||||
config.remove_section(bv[0])
|
||||
else:
|
||||
config.remove_option(bv[0], bv[1].lower())
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
if all([k != 'CONFIG_VERSION', k != 'MINIMAL_INI']):
|
||||
try:
|
||||
if v[0] == str and any([value == "", value is None, len(value) == 0, value == 'None']):
|
||||
|
|
|
@ -36,7 +36,7 @@ from mylar import logger, helpers
|
|||
|
||||
class FileChecker(object):
|
||||
|
||||
def __init__(self, dir=None, watchcomic=None, Publisher=None, AlternateSearch=None, manual=None, sarc=None, justparse=None, file=None):
|
||||
def __init__(self, dir=None, watchcomic=None, Publisher=None, AlternateSearch=None, manual=None, sarc=None, justparse=None, file=None, pp_mode=False):
|
||||
#dir = full path to the series Comic Location (manual pp will just be psssing the already parsed filename)
|
||||
if dir:
|
||||
self.dir = dir
|
||||
|
@ -90,6 +90,10 @@ class FileChecker(object):
|
|||
else:
|
||||
self.file = None
|
||||
|
||||
if pp_mode:
|
||||
self.pp_mode = True
|
||||
else:
|
||||
self.pp_mode = False
|
||||
|
||||
self.failed_files = []
|
||||
self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')','\u2014']
|
||||
|
@ -132,7 +136,7 @@ class FileChecker(object):
|
|||
if filename.startswith('.'):
|
||||
continue
|
||||
|
||||
logger.info('[FILENAME]: ' + filename)
|
||||
#logger.info('[FILENAME]: ' + filename)
|
||||
runresults = self.parseit(self.dir, filename, filedir)
|
||||
if runresults:
|
||||
try:
|
||||
|
@ -1118,6 +1122,14 @@ class FileChecker(object):
|
|||
|
||||
dir = dir.encode(mylar.SYS_ENCODING)
|
||||
|
||||
if all([mylar.CONFIG.ENABLE_TORRENTS is True, self.pp_mode is True]):
|
||||
import db
|
||||
myDB = db.DBConnection()
|
||||
pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE (a.Status='Post-Processed' or a.status='Snatched' or a.provider='32P') and a.crc is not NULL and (b.Status='Downloaded' or b.status='Archived') GROUP BY a.crc ORDER BY a.DateAdded")
|
||||
for pp in pp_crc:
|
||||
pp_crclist.append({'IssueID': pp['IssueID'],
|
||||
'crc': pp['crc']})
|
||||
|
||||
for (dirname, subs, files) in os.walk(dir):
|
||||
|
||||
for fname in files:
|
||||
|
@ -1129,6 +1141,13 @@ class FileChecker(object):
|
|||
#Ignoring MAC OS Finder directory of cached files (/.AppleDouble/<name of file(s)>)
|
||||
continue
|
||||
|
||||
if all([mylar.CONFIG.ENABLE_TORRENTS is True, self.pp_mode is True]):
|
||||
tcrc = helpers.crc(os.path.join(dirname, fname).decode(mylar.SYS_ENCODING))
|
||||
crcchk = [x for x in pp_crclist if tcrc == x['crc']]
|
||||
if crcchk:
|
||||
#logger.fdebug('%s Already post-processed this item %s - Ignoring' % fname)
|
||||
continue
|
||||
|
||||
if os.path.splitext(fname)[1].lower().endswith(comic_ext):
|
||||
if direc is None:
|
||||
comicsize = os.path.getsize(os.path.join(dir, fname))
|
||||
|
|
|
@ -2933,10 +2933,10 @@ def nzb_monitor(queue):
|
|||
if item == 'exit':
|
||||
logger.info('Cleaning up workers for shutdown')
|
||||
break
|
||||
if mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True:
|
||||
if all([mylar.USE_SABNZBD is True, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]):
|
||||
nz = sabnzbd.SABnzbd(item)
|
||||
nzstat = nz.processor()
|
||||
elif mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True:
|
||||
elif all([mylar.USE_NZBGET is True, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]):
|
||||
nz = nzbget.NZBGet()
|
||||
nzstat = nz.processor(item)
|
||||
else:
|
||||
|
|
|
@ -116,7 +116,9 @@ class NZBGet(object):
|
|||
logger.fdebug('location found @ %s' % hq[0]['DestDir'])
|
||||
return {'status': True,
|
||||
'name': re.sub('.nzb', '', hq[0]['NZBName']).strip(),
|
||||
'location': hq[0]['DestDir']}
|
||||
'location': hq[0]['DestDir'],
|
||||
'failed': False}
|
||||
|
||||
else:
|
||||
logger.warn('no file found where it should be @ %s - is there another script that moves things after completion ?' % hq[0]['DestDir'])
|
||||
return {'status': False}
|
||||
|
|
|
@ -1523,6 +1523,7 @@ class WebInterface(object):
|
|||
watchlibrary = helpers.listLibrary()
|
||||
issueLibrary = helpers.listIssues(weekinfo['weeknumber'], weekinfo['year'])
|
||||
oneofflist = helpers.listoneoffs(weekinfo['weeknumber'], weekinfo['year'])
|
||||
chklist = []
|
||||
|
||||
for weekly in w_results:
|
||||
xfound = False
|
||||
|
@ -1577,6 +1578,7 @@ class WebInterface(object):
|
|||
"SERIESYEAR": weekly['seriesyear'],
|
||||
"HAVEIT": haveit,
|
||||
"LINK": linkit,
|
||||
"HASH": None,
|
||||
"AUTOWANT": False
|
||||
})
|
||||
else:
|
||||
|
@ -1592,6 +1594,7 @@ class WebInterface(object):
|
|||
"SERIESYEAR": weekly['seriesyear'],
|
||||
"HAVEIT": haveit,
|
||||
"LINK": linkit,
|
||||
"HASH": None,
|
||||
"AUTOWANT": True
|
||||
})
|
||||
else:
|
||||
|
@ -1606,11 +1609,15 @@ class WebInterface(object):
|
|||
"SERIESYEAR": weekly['seriesyear'],
|
||||
"HAVEIT": haveit,
|
||||
"LINK": linkit,
|
||||
"HASH": None,
|
||||
"AUTOWANT": False
|
||||
})
|
||||
|
||||
if tmp_status == 'Wanted':
|
||||
wantedcount +=1
|
||||
elif tmp_status == 'Snatched':
|
||||
chklist.append(str(weekly['IssueID']))
|
||||
|
||||
|
||||
weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False)
|
||||
else:
|
||||
|
@ -1619,6 +1626,23 @@ class WebInterface(object):
|
|||
if generateonly is True:
|
||||
return weeklyresults, weekinfo
|
||||
else:
|
||||
endresults = []
|
||||
if len(chklist) > 0:
|
||||
for genlist in helpers.chunker(chklist, 200):
|
||||
tmpsql = "SELECT * FROM snatched where Status='Snatched' and status != 'Post-Processed' and (provider='32P' or Provider='WWT' or Provider='DEM') AND IssueID in ({seq})".format(seq=','.join(['?'] *(len(genlist))))
|
||||
chkthis = myDB.select(tmpsql, genlist)
|
||||
if chkthis is None:
|
||||
continue
|
||||
else:
|
||||
for w in weeklyresults:
|
||||
weekit = w
|
||||
snatchit = [x['hash'] for x in chkthis if w['ISSUEID'] == x['IssueID']]
|
||||
if snatchit:
|
||||
logger.fdebug('[%s] Discovered previously snatched torrent not downloaded. Marking for manual auto-snatch retrieval: %s' % (w['COMIC'], ''.join(snatchit)))
|
||||
weekit['HASH'] = ''.join(snatchit)
|
||||
endresults.append(weekit)
|
||||
weeklyresults = endresults
|
||||
|
||||
if week:
|
||||
return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo)
|
||||
else:
|
||||
|
@ -2188,6 +2212,7 @@ class WebInterface(object):
|
|||
def manageIssues(self, **kwargs):
|
||||
status = kwargs['status']
|
||||
results = []
|
||||
resultlist = []
|
||||
myDB = db.DBConnection()
|
||||
if mylar.CONFIG.ANNUALS_ON:
|
||||
issues = myDB.select("SELECT * from issues WHERE Status=? AND ComicName NOT LIKE '%Annual%'", [status])
|
||||
|
@ -2197,8 +2222,26 @@ class WebInterface(object):
|
|||
annuals = []
|
||||
for iss in issues:
|
||||
results.append(iss)
|
||||
resultlist.append(str(iss['IssueID']))
|
||||
for ann in annuals:
|
||||
results.append(ann)
|
||||
resultlist.append(str(iss['IssueID']))
|
||||
endresults = []
|
||||
if status == 'Snatched':
|
||||
for genlist in helpers.chunker(resultlist, 200):
|
||||
tmpsql = "SELECT * FROM snatched where Status='Snatched' and status != 'Post-Processed' and (provider='32P' or Provider='WWT' or Provider='DEM') AND IssueID in ({seq})".format(seq=','.join(['?'] *(len(genlist))))
|
||||
chkthis = myDB.select(tmpsql, genlist)
|
||||
if chkthis is None:
|
||||
continue
|
||||
else:
|
||||
for r in results:
|
||||
rr = dict(r)
|
||||
snatchit = [x['hash'] for x in chkthis if r['ISSUEID'] == x['IssueID']]
|
||||
if snatchit:
|
||||
logger.fdebug('[%s] Discovered previously snatched torrent not downloaded. Marking for manual auto-snatch retrieval: %s' % (r['ComicName'], ''.join(snatchit)))
|
||||
rr['hash'] = ''.join(snatchit)
|
||||
endresults.append(rr)
|
||||
results = endresults
|
||||
|
||||
return serve_template(templatename="manageissues.html", title="Manage " + str(status) + " Issues", issues=results, status=status)
|
||||
manageIssues.exposed = True
|
||||
|
@ -4588,7 +4631,6 @@ class WebInterface(object):
|
|||
|
||||
|
||||
def configUpdate(self, **kwargs):
|
||||
|
||||
checked_configs = ['enable_https', 'launch_browser', 'syno_fix', 'auto_update', 'annuals_on', 'api_enabled', 'nzb_startup_search',
|
||||
'enforce_perms', 'sab_to_mylar', 'torrent_local', 'torrent_seedbox', 'rtorrent_ssl', 'rtorrent_verify', 'rtorrent_startonload',
|
||||
'enable_torrents', 'qbittorrent_startonload', 'enable_rss', 'nzbsu', 'nzbsu_verify',
|
||||
|
|
Loading…
Reference in New Issue