FIX:(#1630) One-off issues will now be post-processed according to the grab-bag dir location
IMP: One-off pull-list issues will now have their status marked accordingly on the weekly pull-list for tracking purposes
FIX: Fix for uTorrent not being able to update status properly after the initial snatch
FIX: Fix for some incorrect variable references, due to differing snatch methods, when trying to run on-snatch scripts
FIX: Fix for the auto-torrent-snatcher trying to snatch a torrent with an '&' in the filename
FIX:(#1637) Fix for the Telegram on-snatch toggle not saving properly
IMP: Added additional logging for metatagging when attempting to create the temp folder in the cache location
IMP: Will now clean up any leftover metatagging directories that fail during normal post-processing, instead of leaving them behind and continuing
FIX: Will notify when one-off grabs/storyarcs are post-processed
FIX: Fix for displaying cbz issue details (the 'i' on the comic details page) when the series title contained non-alphanumeric characters
IMP: Added ability to change status to Wanted on the Upcoming tab (performs an immediate search on the selected issues)
IMP: Threaded the one-off search call from the weekly pull

This commit is contained in:
evilhero 2017-06-02 10:05:06 -04:00
parent 33b6de987b
commit 535c557e30
14 changed files with 326 additions and 138 deletions

View File

@ -396,7 +396,7 @@
%if linky:
<a href="downloadthis?pathfile=${linky |u}"><img src="interfaces/default/images/download_icon.png" height="25" width="25" title="Download the Issue" class="highqual" /></a>
%if linky.endswith('.cbz'):
<a href="#issue-box" onclick="return runMetaIssue('${linky |u}', '${comic['ComicName']}', '${issue['Issue_Number']}', '${issue['IssueDate']}', '${issue['IssueName'] |u}');" class="issue-window"><img src="interfaces/default/images/issueinfo.png" height="25" width="25" title="View Issue Details" class="highqual" /></a>
<a href="#issue-box" onclick="return runMetaIssue('${linky |u}', '${comic['ComicName']| u}', '${issue['Issue_Number']}', '${issue['IssueDate']}', '${issue['IssueName'] |u}');" class="issue-window"><img src="interfaces/default/images/issueinfo.png" height="25" width="25" title="View Issue Details" class="highqual" /></a>
<div id="issue-box" class="issue-popup">
<a href="#" class="close"><img src="interfaces/default/images/close_pop.png" class="btn_close" title="Close Window" alt="Close" class="highqual" /></a>
<fieldset>
@ -530,7 +530,7 @@
%if linky:
<a href="downloadthis?pathfile=${linky |u}"><img src="interfaces/default/images/download_icon.png" height="25" width="25" title="Download the annual" class="highqual" /></a>
%if linky.endswith('.cbz'):
<a href="#issue-box" onclick="return runMetaIssue('${linky |u}', '${comic['ComicName']}', '${annual['Issue_Number']}', '${annual['IssueDate']}', '${annual['IssueName']}');" class="issue-window"><img src="interfaces/default/images/issueinfo.png" height="25" width="25" title="View Issue Details" class="highqual" /></a>
<a href="#issue-box" onclick="return runMetaIssue('${linky |u}', '${comic['ComicName']| u}', '${annual['Issue_Number']}', '${annual['IssueDate']}', '${annual['IssueName']| u}');" class="issue-window"><img src="interfaces/default/images/issueinfo.png" height="25" width="25" title="View Issue Details" class="highqual" /></a>
<div id="issue-box" class="issue-popup">
<a href="#" class="close"><img src="interfaces/default/images/close_pop.png" class="btn_close" title="Close Window" alt="Close" class="highqual" /></a>
<fieldset>

View File

@ -38,6 +38,7 @@
Mark selected issues as
<select name="action" onChange="doAjaxCall('markissues',$(this),'table',true);" data-error="You didn't select any issues" data-success="selected issues marked">
<option disabled="disabled" selected="selected">Choose...</option>
<option value="Wanted">Wanted</option>
<option value="Skipped">Skipped</option>
<option value="Downloaded">Downloaded</option>
<option value="Archived">Archived</option>

View File

@ -78,6 +78,9 @@
if weekly['AUTOWANT'] == True:
grade = 'H'
if weekly['HAVEIT'] == 'OneOff':
grade = 'H'
#if the comicid is present, but issue isn't marked as wanted.
if weekly['HAVEIT'] == 'Yes' and weekly['STATUS'] == 'Skipped':
grade = 'E'
@ -93,6 +96,8 @@
%else:
${weekly['COMIC']}
%endif
%elif weekly['HAVEIT'] == 'OneOff':
<a href="#">${weekly['COMIC']}</a>
%else:
<a href="comicDetails?ComicID=${weekly['COMICID']}">${weekly['COMIC']}</a>
%endif
@ -102,7 +107,11 @@
<td class="status">Auto-Want
%else:
<td class="status">${weekly['STATUS']}
%if weekly['STATUS'] == 'Skipped':
%if weekly['HAVEIT'] == 'OneOff':
%if weekly['STATUS'] == 'Snatched' or weekly['STATUS'] == 'Downloaded':
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Snatch issue again as a One-Off"><span class="ui-icon ui-icon-plus"></span>Retry</a>
%endif
%elif weekly['STATUS'] == 'Skipped':
%if weekly['COMICID'] != '' and weekly['COMICID'] is not None:
<a href="#" title="auto-add by ID available for this series" onclick="doAjaxCall('addbyid?comicid=${weekly['COMICID']}&calledby=True',$(this),'table')" data-success="${weekly['COMIC']} is now being added to your wachlist."><span class="ui-icon ui-icon-plus"></span>Add</a>
%else:
@ -111,7 +120,7 @@
%endif
<a href="searchit?name=${weekly['COMIC'] | u}&issue=${weekly['ISSUE']}&mode=pullseries" title="Search for this series to add to your watchlist"><span class="ui-icon ui-icon-plus"></span>Search</a>
%endif
<a href="queueissue?ComicName=${weekly['COMIC'] | u}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}" title="Just grab it"><span class="ui-icon ui-icon-plus"></span>One-Off</a>
<a href="#" onclick="doAjaxCall('queueit?ComicName=${weekly['COMIC'] | u}&ComicID=${weekly['COMICID']}&IssueID=${weekly['ISSUEID']}&ComicIssue=${weekly['ISSUE']}&mode=pullwant&Publisher=${weekly['PUBLISHER']}&pullinfo=${weekinfo['midweek']}&pullweek=${weekinfo['weeknumber']}&pullyear=${weekinfo['year']}',$(this),'table')" data-success="Successfully submitted search request for ${weekly['COMIC']} #${weekly['ISSUE']}" title="Just grab it"><span class="ui-icon ui-icon-plus"></span>One-Off</a>
%endif
%endif
</td>

View File

@ -316,7 +316,7 @@ class PostProcessor(object):
return
logger.info('I have located ' + str(filelist['comiccount']) + ' files that I should be able to post-process. Continuing...')
#load the hashes for torrents so continual post-processing of same isseus doesn't occur.
#load the hashes for torrents so continual post-processing of the same issues doesn't occur.
pp_crclist = []
if mylar.ENABLE_TORRENTS:
pp_crc = myDB.select("SELECT a.crc, b.IssueID FROM Snatched as a INNER JOIN issues as b ON a.IssueID=b.IssueID WHERE a.Status='Post-Processed' AND a.crc is not NULL AND (b.Status='Downloaded' OR b.Status='Archived') ORDER BY b.IssueDate")
@ -921,6 +921,7 @@ class PostProcessor(object):
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone()
self.oneoff = False
if nzbiss is None:
self._log("Failure - could not initially locate nzbfile in my database to rename.")
logger.fdebug(module + ' Failure - could not locate nzbfile initially')
@ -939,23 +940,31 @@ class PostProcessor(object):
else:
self._log("I corrected and found the nzb as : " + str(nzbname))
logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname))
issueid = nzbiss['IssueID']
else:
issueid = nzbiss['IssueID']
logger.fdebug(module + ' Issueid: ' + str(issueid))
sarc = nzbiss['SARC']
tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone()
#issueid = nzbiss['IssueID']
comicid = None
comicname = None
issuenumber = None
issueid = nzbiss['IssueID']
logger.fdebug(module + ' Issueid: ' + str(issueid))
sarc = nzbiss['SARC']
self.oneoff = nzbiss['OneOff']
tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone()
if tmpiss is not None:
comicid = tmpiss['ComicID']
comicname = tmpiss['ComicName']
issuenumber = tmpiss['Issue_Number']
#use issueid to get publisher, series, year, issue number
comicid = None
comicname = None
issuenumber = None
if tmpiss is not None:
comicid = tmpiss['ComicID']
comicname = tmpiss['ComicName']
issuenumber = tmpiss['Issue_Number']
elif all([self.oneoff is not None, mylar.ALT_PULL == 2]):
oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone()
if oneinfo is not None:
comicid = oneinfo['ComicID']
comicname = oneinfo['COMIC']
issuenumber = oneinfo['ISSUE']
publisher = oneinfo['PUBLISHER']
self.oneoff = True
logger.info(module + ' Discovered %s # %s by %s [comicid:%s][issueid:%s]' % (comicname, issuenumber, publisher, comicid, issueid))
#use issueid to get publisher, series, year, issue number
annchk = "no"
# if 'annual' in nzbname.lower():
@ -977,7 +986,7 @@ class PostProcessor(object):
sandwich = issueid
elif 'G' in issueid or '-' in issueid:
sandwich = 1
elif issueid >= '90000' or issueid == '1':
elif any([self.oneoff is True, issueid >= '900000', issueid == '1']):
logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.')
sandwich = None #arbitrarily set it to None just to force one-off downloading below.
else:
@ -1007,7 +1016,7 @@ class PostProcessor(object):
# if sandwich is less than 900000 it's a normal watchlist download. Bypass.
pass
else:
if issuenzb is None or 'S' in sandwich or int(sandwich) >= 900000:
if any([self.oneoff is True, issuenzb is None]) or (sandwich is not None and ('S' in str(sandwich) or int(sandwich) >= 900000)):
# this has no issueID, therefore it's a one-off or a manual post-proc.
# At this point, let's just drop it into the Comic Location folder and forget about it..
if sandwich is not None and 'S' in sandwich:
@ -1017,6 +1026,7 @@ class PostProcessor(object):
self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.")
logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.')
self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR)
grdst = mylar.GRABBAG_DIR
odir = None
ofilename = None
@ -1070,7 +1080,7 @@ class PostProcessor(object):
#if a one-off download from the pull-list, it will not have an issueid associated with it, and conversion/tagging will fail.
#if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes.
if all([mylar.ENABLE_META, sandwich is not None, 'S' in sandwich]):
if all([mylar.ENABLE_META, issueid is not None]):
self._log("Metatagging enabled - proceeding...")
try:
import cmtagmylar
@ -1163,13 +1173,27 @@ class PostProcessor(object):
ctrlVal = {"IssueArcID": issuearcid}
newVal = {"Status": "Downloaded",
"Location": grab_dst}
logger.info('writing: ' + str(newVal) + ' -- ' + str(ctrlVal))
myDB.upsert("readinglist", newVal, ctrlVal)
logger.info('wrote.')
logger.info(module + ' Updated status to Downloaded')
logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst)
self._log(u"Post Processing SUCCESSFUL! ")
elif self.oneoff is True:
logger.info(module + ' IssueID is : ' + str(issueid))
ctrlVal = {"IssueID": issueid}
newVal = {"Status": "Downloaded"}
logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal))
myDB.upsert("weekly", newVal, ctrlVal)
logger.info(module + ' Updated status to Downloaded')
myDB.upsert("oneoffhistory", newVal, ctrlVal)
logger.info(module + ' Updated history for one-offs for tracking purposes')
logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst))
self._log(u"Post Processing SUCCESSFUL! ")
try:
self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module)
except:
pass
self.valreturn.append({"self.log": self.log,
"mode": 'stop'})
@ -1994,40 +2018,7 @@ class PostProcessor(object):
return self.queue.put(self.valreturn)
if annchk == "no":
prline = series + '(' + issueyear + ') - issue #' + issuenumOG
else:
if 'annual' not in series.lower():
prline = series + ' Annual (' + issueyear + ') - issue #' + issuenumOG
else:
prline = series + ' (' + issueyear + ') - issue #' + issuenumOG
prline2 = 'Mylar has downloaded and post-processed: ' + prline
if mylar.PROWL_ENABLED:
pushmessage = prline
prowl = notifiers.PROWL()
prowl.notify(pushmessage, "Download and Postprocessing completed", module=module)
if mylar.NMA_ENABLED:
nma = notifiers.NMA()
nma.notify(prline=prline, prline2=prline2, module=module)
if mylar.PUSHOVER_ENABLED:
pushover = notifiers.PUSHOVER()
pushover.notify(prline, prline2, module=module)
if mylar.BOXCAR_ENABLED:
boxcar = notifiers.BOXCAR()
boxcar.notify(prline=prline, prline2=prline2, module=module)
if mylar.PUSHBULLET_ENABLED:
pushbullet = notifiers.PUSHBULLET()
pushbullet.notify(prline=prline, prline2=prline2, module=module)
if mylar.TELEGRAM_ENABLED:
telegram = notifiers.TELEGRAM()
telegram.notify(prline, prline2)
self.sendnotify(series, issueyear, issuenumOG, annchk, module)
logger.info(module + ' Post-Processing completed for: ' + series + ' ' + dispiss)
self._log(u"Post Processing SUCCESSFUL! ")
@ -2040,6 +2031,53 @@ class PostProcessor(object):
return self.queue.put(self.valreturn)
def sendnotify(self, series, issueyear, issuenumOG, annchk, module):
if annchk == "no":
if issueyear is None:
prline = series + ' - issue #' + issuenumOG
else:
prline = series + ' (' + issueyear + ') - issue #' + issuenumOG
else:
if issueyear is None:
if 'annual' not in series.lower():
prline = series + ' Annual - issue #' + issuenumOG
else:
prline = series + ' - issue #' + issuenumOG
else:
if 'annual' not in series.lower():
prline = series + ' Annual (' + issueyear + ') - issue #' + issuenumOG
else:
prline = series + ' (' + issueyear + ') - issue #' + issuenumOG
prline2 = 'Mylar has downloaded and post-processed: ' + prline
if mylar.PROWL_ENABLED:
pushmessage = prline
prowl = notifiers.PROWL()
prowl.notify(pushmessage, "Download and Postprocessing completed", module=module)
if mylar.NMA_ENABLED:
nma = notifiers.NMA()
nma.notify(prline=prline, prline2=prline2, module=module)
if mylar.PUSHOVER_ENABLED:
pushover = notifiers.PUSHOVER()
pushover.notify(prline, prline2, module=module)
if mylar.BOXCAR_ENABLED:
boxcar = notifiers.BOXCAR()
boxcar.notify(prline=prline, prline2=prline2, module=module)
if mylar.PUSHBULLET_ENABLED:
pushbullet = notifiers.PUSHBULLET()
pushbullet.notify(prline=prline, prline2=prline2, module=module)
if mylar.TELEGRAM_ENABLED:
telegram = notifiers.TELEGRAM()
telegram.notify(prline, prline2)
return
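#A note on the refactor above: sendnotify() now centralizes the notifier fan-out that was
#previously inlined after post-processing, so one-off grabs and story-arcs can notify too.
#A minimal sketch of the two call patterns this commit uses (values are illustrative only);
#the one-off path passes issueyear=None since the pull-list carries no series year, and wraps
#the call in try/except so a notifier failure can't abort post-processing:
#
#    self.sendnotify(series, issueyear, issuenumOG, annchk, module)    #watchlist item
#    try:
#        self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module)
#    except:
#        pass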
class FolderCheck():

View File

@ -520,7 +520,7 @@ def initialize():
STORYARCDIR, COPY2ARCDIR, ARC_FOLDERFORMAT, ARC_FILEOPS, CVURL, CV_VERIFY, CHECK_FOLDER, ENABLE_CHECK_FOLDER, \
COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_SNATCH_SCRIPT, SNATCH_SCRIPT, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, ALT_PULL, PULLBYFILE, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, \
SYNO_FIX, ENFORCE_PERMS, CHMOD_FILE, CHMOD_DIR, CHOWNER, CHGROUP, ANNUALS_ON, CV_ONLY, CV_ONETIMER, CURRENT_WEEKNUMBER, CURRENT_YEAR, PULL_REFRESH, WEEKFOLDER, WEEKFOLDER_LOC, WEEKFOLDER_FORMAT, UMASK, \
TELEGRAM_ENABLED, TELEGRAM_TOKEN, TELEGRAM_USERID
TELEGRAM_ENABLED, TELEGRAM_TOKEN, TELEGRAM_USERID, TELEGRAM_ONSNATCH
if __INITIALIZED__:
return False
@ -1802,7 +1802,7 @@ def dbcheck():
c.execute('CREATE TABLE IF NOT EXISTS issues (IssueID TEXT, ComicName TEXT, IssueName TEXT, Issue_Number TEXT, DateAdded TEXT, Status TEXT, Type TEXT, ComicID TEXT, ArtworkURL Text, ReleaseDate TEXT, Location TEXT, IssueDate TEXT, Int_IssueNumber INT, ComicSize TEXT, AltIssueNumber TEXT, IssueDate_Edit TEXT, ImageURL TEXT, ImageURL_ALT TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS snatched (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Size INTEGER, DateAdded TEXT, Status TEXT, FolderName TEXT, ComicID TEXT, Provider TEXT, Hash TEXT, crc TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS upcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Status TEXT, DisplayComicName TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS nzblog (IssueID TEXT, NZBName TEXT, SARC TEXT, PROVIDER TEXT, ID TEXT, AltNZBName TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS nzblog (IssueID TEXT, NZBName TEXT, SARC TEXT, PROVIDER TEXT, ID TEXT, AltNZBName TEXT, OneOff TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE TEXT, PUBLISHER TEXT, ISSUE TEXT, COMIC VARCHAR(150), EXTRA TEXT, STATUS TEXT, ComicID TEXT, IssueID TEXT, CV_Last_Update TEXT, DynamicName TEXT, weeknumber TEXT, year TEXT, rowid INTEGER PRIMARY KEY)')
# c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS importresults (impID TEXT, ComicName TEXT, ComicYear TEXT, Status TEXT, ImportDate TEXT, ComicFilename TEXT, ComicLocation TEXT, WatchMatch TEXT, DisplayName TEXT, SRID TEXT, ComicID TEXT, IssueID TEXT, Volume TEXT, IssueNumber TEXT, DynamicName TEXT)')
@ -1814,6 +1814,7 @@ def dbcheck():
c.execute('CREATE TABLE IF NOT EXISTS failed (ID TEXT, Status TEXT, ComicID TEXT, IssueID TEXT, Provider TEXT, ComicName TEXT, Issue_Number TEXT, NZBName TEXT, DateFailed TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS searchresults (SRID TEXT, results Numeric, Series TEXT, publisher TEXT, haveit TEXT, name TEXT, deck TEXT, url TEXT, description TEXT, comicid TEXT, comicimage TEXT, issues TEXT, comicyear TEXT, ogcname TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS ref32p (ComicID TEXT UNIQUE, ID TEXT, Series TEXT, Updated TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS oneoffhistory (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, Status TEXT, weeknumber TEXT, year TEXT)')
conn.commit()
c.close()
#new
@ -2120,6 +2121,10 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE nzblog ADD COLUMN AltNZBName TEXT')
try:
c.execute('SELECT OneOff from nzblog')
except sqlite3.OperationalError:
c.execute('ALTER TABLE nzblog ADD COLUMN OneOff TEXT')
## -- Annuals Table --
try:

View File

@ -43,17 +43,27 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
return "fail"
#make use of a temporary file location for post-processing, to ensure that the originals don't get hammered when converting
new_filepath = None
new_folder = None
try:
import tempfile
new_folder = os.path.join(tempfile.mkdtemp(prefix='mylar_', dir=mylar.CACHE_DIR)) #prefix, suffix, dir
logger.info('Filepath: %s' %filepath)
logger.info('Filename: %s' %filename)
new_folder = tempfile.mkdtemp(prefix='mylar_', dir=mylar.CACHE_DIR) #prefix, suffix, dir
logger.info('New_Folder: %s' % new_folder)
new_filepath = os.path.join(new_folder, filename)
logger.info('New_Filepath: %s' % new_filepath)
if mylar.FILE_OPTS == 'copy' and manualmeta == False:
logger.info('Attempting to copy: %s' % mylar.FILE_OPTS)
shutil.copy(filepath, new_filepath)
else:
logger.info('Attempting to move: %s' % mylar.FILE_OPTS)
shutil.move(filepath, new_filepath)
filepath = new_filepath
except:
logger.warn(module + ' Unexpected Error: %s' % sys.exc_info()[0])
logger.warn(module + ' Unable to create temporary directory to perform meta-tagging. Processing without metatagging.')
tidyup(filepath, new_filepath, new_folder)
return "fail"
## Sets up other directories ##
@ -97,6 +107,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
except subprocess.CalledProcessError as e:
#logger.warn(module + "[WARNING] "command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
logger.warn(module + '[WARNING] Make sure that you are using the comictagger included with Mylar.')
tidyup(filepath, new_filepath, new_folder)
return "fail"
ctend = ctversion.find('\n')
@ -128,6 +139,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
if tagcnt == 0:
logger.warn(module + ' You have metatagging enabled, but you have not selected the type(s) of metadata to write. Please fix and re-run manually')
tidyup(filepath, new_filepath, new_folder)
return "fail"
#if it's a cbz file - check if no-overwrite existing tags is enabled / disabled in config.
@ -216,6 +228,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
elif initial_ctrun:
logger.warn(module + '[COMIC-TAGGER][CBR-TO-CBZ] Failed to convert cbr to cbz - check permissions on folder : ' + mylar.CACHE_DIR + ' and/or the location where Mylar is trying to tag the files from.')
initial_ctrun = False
tidyup(filepath, new_filepath, new_folder)
return 'fail'
elif 'Cannot find' in out:
logger.warn(module + '[COMIC-TAGGER] Unable to locate file: ' + filename)
@ -230,9 +243,19 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
i+=1
except OSError, e:
logger.warn(module + '[COMIC-TAGGER] Unable to run comictagger with the options provided: ' + re.sub(f_tagoptions[f_tagoptions.index(mylar.COMICVINE_API)], 'REDACTED', str(script_cmd)))
tidyup(filepath, new_filepath, new_folder)
return "fail"
if mylar.CBR2CBZ_ONLY and initial_ctrun == False:
break
return filepath
def tidyup(filepath, new_filepath, new_folder):
if all([new_filepath is not None, new_folder is not None]):
if all([os.path.exists(new_folder), os.path.isfile(filepath)]):
shutil.rmtree(new_folder)
elif os.path.exists(new_filepath) and not os.path.exists(filepath):
shutil.move(new_filepath, filepath)

View File

@ -1807,6 +1807,22 @@ def listStoryArcs():
library[row['CV_ArcID']] = row['CV_ArcID']
return library
def listoneoffs(weeknumber, year):
import db
library = []
myDB = db.DBConnection()
# Get Distinct one-off issues from the pullist that have already been downloaded / snatched
oneoffs = myDB.select("SELECT DISTINCT(IssueID), Status, ComicID, ComicName, IssueNumber FROM oneoffhistory WHERE weeknumber=? AND year=? AND (Status='Downloaded' OR Status='Snatched')", [weeknumber, year])
for row in oneoffs:
library.append({'IssueID': row['IssueID'],
'ComicID': row['ComicID'],
'ComicName': row['ComicName'],
'IssueNumber': row['IssueNumber'],
'Status': row['Status'],
'weeknumber': weeknumber,
'year': year})
return library
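#A minimal sketch of how the weekly pull page is expected to consume listoneoffs() -- this
#mirrors the weeklypull handling in webserve.py further below; weekinfo values are illustrative:
#
#    oneofflist = listoneoffs(weekinfo['weeknumber'], weekinfo['year'])
#    for weekly in w_results:
#        xlist = [x['Status'] for x in oneofflist if x['IssueID'] == weekly['IssueID']]
#        if xlist:
#            haveit = 'OneOff'      #flags the row so the template offers the one-off Retry link
#            tmp_status = xlist[0]  #'Snatched' or 'Downloaded', straight from oneoffhistory
#        else:
#            haveit = 'No'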
def manualArc(issueid, reading_order, storyarcid):
import db
if issueid.startswith('4000-'):
@ -2660,6 +2676,7 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False):
downlocation = os.path.join(torrent_folder, torrent_info['name'])
downlocation = re.sub("'", "\\'", downlocation)
downlocation = re.sub("&", "\\&", downlocation)
script_cmd = shlex.split(curScriptName, posix=False) + [downlocation]
logger.fdebug(u"Executing command " +str(script_cmd))
@ -2825,12 +2842,16 @@ def script_env(mode, vars):
if mode == 'on-snatch':
runscript = mylar.SNATCH_SCRIPT
if 'torrentinfo' in vars:
os.environ['mylar_release_hash'] = vars['torrentinfo']['hash']
os.environ['mylar_release_name'] = vars['torrentinfo']['name']
os.environ['mylar_release_folder'] = vars['torrentinfo']['folder']
if 'hash' in vars['torrentinfo']:
os.environ['mylar_release_hash'] = vars['torrentinfo']['hash']
if 'name' in vars['torrentinfo']:
os.environ['mylar_release_name'] = vars['torrentinfo']['name']
if 'folder' in vars['torrentinfo']:
os.environ['mylar_release_folder'] = vars['torrentinfo']['folder']
if 'label' in vars['torrentinfo']:
os.environ['mylar_release_label'] = vars['torrentinfo']['label']
os.environ['mylar_release_filesize'] = str(vars['torrentinfo']['total_filesize'])
if 'total_filesize' in vars['torrentinfo']:
os.environ['mylar_release_filesize'] = str(vars['torrentinfo']['total_filesize'])
if 'time_started' in vars['torrentinfo']:
os.environ['mylar_release_start'] = str(vars['torrentinfo']['time_started'])
if 'filepath' in vars['torrentinfo']:

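Since the on-snatch environment variables above are now exported only when present, a snatch script should treat each one as optional. A minimal sketch of a consumer (the script itself is hypothetical, not part of this commit):

#!/usr/bin/env python
#on-snatch script: read whichever mylar_release_* values were exported for this snatch
import os
release_name = os.environ.get('mylar_release_name', 'unknown')
release_hash = os.environ.get('mylar_release_hash')      #absent for non-torrent snatches
release_folder = os.environ.get('mylar_release_folder')
release_size = os.environ.get('mylar_release_filesize')  #a string when present, not an int
print('snatched: %s [hash: %s]' % (release_name, release_hash))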
View File

@ -88,8 +88,10 @@ class NMA:
self.TEST_NMA_URL = "https://www.notifymyandroid.com/publicapi/verify"
if test_apikey is None:
self.apikey = mylar.NMA_APIKEY
self.test = False
else:
self.apikey = test_apikey
self.test = True
self.priority = mylar.NMA_PRIORITY
self._session = requests.Session()
@ -118,7 +120,10 @@ class NMA:
'message': '[' + str(error_code) + '] ' + error_message}
else:
logger.info(module + '[' + str(success_code) + '] NotifyMyAndroid apikey valid. Test notification sent successfully.')
if self.test is True:
logger.info(module + '[' + str(success_code) + '] NotifyMyAndroid apikey valid. Test notification sent successfully.')
else:
logger.info(module + '[' + str(success_code) + '] NotifyMyAndroid notification sent successfully.')
return {'status': True,
'message': 'APIKEY verified OK / notification sent'}
elif r.status_code >= 400 and r.status_code < 500:

View File

@ -281,6 +281,11 @@ def torrents(pickfeed=None, seriesname=None, issue=None, feedinfo=None):
seeddigits = 0
#if '0-Day Comics Pack' in series:
# logger.info('Comic Pack detected : ' + series)
# itd = True
if int(mylar.MINSEEDS) >= int(seeddigits):
#new releases have it as '&id', notification feeds have it as %ampid (possibly even &amp;id)
link = feedme.entries[i].link
@ -462,10 +467,10 @@ def rssdbupdate(feeddata, i, type):
return
def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None):
def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None, oneoff=False):
myDB = db.DBConnection()
seriesname_alt = None
if comicid is None or comicid == 'None':
if any([comicid is None, comicid == 'None', oneoff is True]):
pass
else:
logger.fdebug('ComicID: ' + str(comicid))
@ -658,10 +663,10 @@ def torrentdbsearch(seriesname, issue, comicid=None, nzbprov=None):
return torinfo
def nzbdbsearch(seriesname, issue, comicid=None, nzbprov=None, searchYear=None, ComicVersion=None):
def nzbdbsearch(seriesname, issue, comicid=None, nzbprov=None, searchYear=None, ComicVersion=None, oneoff=False):
myDB = db.DBConnection()
seriesname_alt = None
if comicid is None or comicid == 'None':
if any([comicid is None, comicid == 'None', oneoff is True]):
pass
else:
snm = myDB.selectone("SELECT * FROM comics WHERE comicid=?", [comicid]).fetchone()
@ -1007,13 +1012,16 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site):
logger.fdebug('[' + site + '] Saved torrent file to : ' + filepath)
if mylar.USE_UTORRENT:
uTC = utorrent.utorrentclient()
torrent_info = uTC.addfile(filepath, filename)
if torrent_info:
ti = uTC.addfile(filepath, filename)
if ti == 'fail':
return ti
else:
#if ti is a value, it's the returned torrent hash
torrent_info = {}
torrent_info['hash'] = ti
torrent_info['clientmode'] = 'utorrent'
torrent_info['link'] = linkit
return torrent_info
else:
return "fail"
elif mylar.USE_RTORRENT:
import test

View File

@ -75,14 +75,16 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
ComicName = ComicName + " annual"
if AlternateSearch is not None and AlternateSearch != "None":
AlternateSearch = AlternateSearch + " annual"
oneoff = False
if IssueID is None:
if mode == 'pullwant' or IssueID is None:
#one-off the download.
logger.fdebug('One-Off Search parameters:')
logger.fdebug("ComicName: " + ComicName)
logger.fdebug("Issue: " + str(IssueNumber))
logger.fdebug("Year: " + str(ComicYear))
logger.fdebug("IssueDate:" + str(IssueDate))
oneoff = True
if SARC:
logger.fdebug("Story-ARC Search parameters:")
logger.fdebug("Story-ARC: " + str(SARC))
@ -237,7 +239,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
prov_count+=1
continue
if searchmode == 'rss':
findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName)
findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, oneoff=oneoff)
if findit['status'] is False:
if AlternateSearch is not None and AlternateSearch != "None":
chkthealt = AlternateSearch.split('##')
@ -247,7 +249,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
for calt in chkthealt:
AS_Alternate = re.sub('##', '', calt)
logger.info(u"Alternate Search pattern detected...re-adjusting to : " + str(AS_Alternate))
findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=AS_Alternate, allow_packs=allow_packs)
findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="yes", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=AS_Alternate, allow_packs=allow_packs, oneoff=oneoff)
if findit['status'] is True:
break
if findit['status'] is True:
@ -257,7 +259,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
break
else:
findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs)
findit = NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs, oneoff=oneoff)
if findit['status'] is False:
if AlternateSearch is not None and AlternateSearch != "None":
chkthealt = AlternateSearch.split('##')
@ -267,7 +269,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
for calt in chkthealt:
AS_Alternate = re.sub('##', '', calt)
logger.info(u"Alternate Search pattern detected...re-adjusting to : " + str(AS_Alternate))
findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs)
findit = NZB_SEARCH(AS_Alternate, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, searchprov, send_prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host, ComicVersion=ComicVersion, SARC=SARC, IssueArcID=IssueArcID, RSS="no", ComicID=ComicID, issuetitle=issuetitle, unaltered_ComicName=unaltered_ComicName, allow_packs=allow_packs, oneoff=oneoff)
if findit['status'] is True:
break
if findit['status'] is True:
@ -285,7 +287,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
if findit['status'] is True:
#check for snatched_havetotal being enabled here and adjust counts now.
#IssueID being the catch/check for one-offs as they won't exist on the watchlist and error out otherwise.
if mylar.SNATCHED_HAVETOTAL and IssueID is not None:
if mylar.SNATCHED_HAVETOTAL and any([oneoff is False, IssueID is not None]):
logger.fdebug('Adding this to the HAVE total for the series.')
helpers.incr_snatched(ComicID)
if searchprov == 'TPSE' and mylar.TMP_PROV != searchprov:
@ -307,7 +309,7 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
return findit, 'None'
def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, nzbprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host=None, ComicVersion=None, SARC=None, IssueArcID=None, RSS=None, ComicID=None, issuetitle=None, unaltered_ComicName=None, allow_packs=None):
def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, nzbprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host=None, ComicVersion=None, SARC=None, IssueArcID=None, RSS=None, ComicID=None, issuetitle=None, unaltered_ComicName=None, allow_packs=None, oneoff=False):
if any([allow_packs is None, allow_packs == 'None', allow_packs == 0, allow_packs == '0']) and all([mylar.ENABLE_TORRENT_SEARCH, mylar.ENABLE_32P]):
allow_packs = False
@ -527,7 +529,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
if nzbprov == '32P' or nzbprov == 'TPSE':
cmname = re.sub("%20", " ", str(comsrc))
logger.fdebug("Sending request to [" + str(nzbprov) + "] RSS for " + ComicName + " : " + str(mod_isssearch))
bb = rsscheck.torrentdbsearch(ComicName, mod_isssearch, ComicID, nzbprov)
bb = rsscheck.torrentdbsearch(ComicName, mod_isssearch, ComicID, nzbprov, oneoff)
rss = "yes"
else:
cmname = re.sub("%20", " ", str(comsrc))
@ -535,7 +537,7 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
if nzbprov == 'newznab':
nzbprov_fix = name_newznab
else: nzbprov_fix = nzbprov
bb = rsscheck.nzbdbsearch(findcomic, mod_isssearch, ComicID, nzbprov_fix, ComicYear, ComicVersion)
bb = rsscheck.nzbdbsearch(findcomic, mod_isssearch, ComicID, nzbprov_fix, ComicYear, ComicVersion, oneoff)
rss = "yes"
if bb is None:
bb = 'no results'
@ -1268,7 +1270,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
"pack": True,
"pack_numbers": pack_issuelist,
"pack_issuelist": issueid_info,
"modcomicname": entry['title']})
"modcomicname": entry['title'],
"oneoff": oneoff})
downloadit = True
else:
@ -1337,6 +1340,9 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
#if issue_except: comic_iss = re.sub(issue_except.lower(), '', comic_iss)
logger.fdebug("adjusted nzb comic and issue: " + comic_iss)
if comic_iss is None or comic_iss == '':
continue
splitit = comic_iss.split(None)
#something happened to dognzb searches or results...added a '.' in place of spaces
#screwed up most search results with dognzb. Let's try to adjust.
@ -1586,7 +1592,8 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
"pack": False,
"pack_numbers": None,
"pack_issuelist": None,
"modcomicname": modcomicname})
"modcomicname": modcomicname,
"oneoff": oneoff})
else:
log2file = log2file + "issues don't match.." + "\n"
downloadit = False
@ -1609,8 +1616,10 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
searchresult = searcher(nzbprov, nzbname, comicinfo, entry['link'], IssueID, ComicID, tmpprov, newznab=newznab_host)
if searchresult == 'downloadchk-fail':
foundc['status'] = False
continue
elif searchresult == 'torrent-fail' or searchresult == 'nzbget-fail' or searchresult == 'sab-fail' or searchresult == 'blackhole-fail':
foundc['status'] = False
return foundc
#nzbid, nzbname, sent_to
@ -1644,20 +1653,20 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
logger.fdebug("Found matching comic within pack...preparing to send to Updater with IssueIDs: " + str(issueid_info) + " and nzbname of " + str(nzbname))
#because packs need to have every issue that's not already Downloaded in a Snatched status, throw it to the updater here as well.
for isid in issinfo['issues']:
updater.nzblog(isid['issueid'], nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov)
updater.nzblog(isid['issueid'], nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov, oneoff=oneoff)
updater.foundsearch(ComicID, isid['issueid'], mode='series', provider=tmpprov)
notify_snatch(nzbname, sent_to, comicinfo[0]['modcomicname'], comicinfo[0]['comyear'], comicinfo[0]['pack_numbers'], nzbprov)
else:
if alt_nzbname is None or alt_nzbname == '':
logger.fdebug("Found matching comic...preparing to send to Updater with IssueID: " + str(IssueID) + " and nzbname: " + str(nzbname))
if '[RSS]' in tmpprov: tmpprov = re.sub('\[RSS\]', '', tmpprov).strip()
updater.nzblog(IssueID, nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov)
updater.nzblog(IssueID, nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov, oneoff=oneoff)
else:
logger.fdebug("Found matching comic...preparing to send to Updater with IssueID: " + str(IssueID) + " and nzbname: " + str(nzbname) + '[' + alt_nzbname + ']')
if '[RSS]' in tmpprov: tmpprov = re.sub('\[RSS\]', '', tmpprov).strip()
updater.nzblog(IssueID, nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov, alt_nzbname=alt_nzbname)
updater.nzblog(IssueID, nzbname, ComicName, SARC=SARC, IssueArcID=IssueArcID, id=nzbid, prov=tmpprov, alt_nzbname=alt_nzbname, oneoff=oneoff)
#send out the notifications for the snatch.
if IssueID is None:
if any([oneoff is True, IssueID is None]):
cyear = ComicYear
else:
cyear = comyear
@ -1974,6 +1983,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
IssueNumber = comicinfo[0]['IssueNumber']
comyear = comicinfo[0]['comyear']
modcomicname = comicinfo[0]['modcomicname']
oneoff = comicinfo[0]['oneoff']
#setup the priorities.
if mylar.SAB_PRIORITY:
@ -2006,7 +2016,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if comicinfo[0]['pack'] == True:
logger.info(u"Found " + ComicName + " (" + str(comyear) + ") issue: " + str(IssueNumber) + " using " + str(tmpprov) + " within a pack containing issues: " + comicinfo[0]['pack_numbers'])
else:
if IssueID is None:
if any([oneoff is True, IssueID is None]):
#one-off information
logger.fdebug("ComicName: " + ComicName)
logger.fdebug("Issue: " + str(IssueNumber))
@ -2017,7 +2027,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
logger.fdebug("link given by: " + str(nzbprov))
if mylar.FAILED_DOWNLOAD_HANDLING:
if all([nzbid is not None, IssueID is not None]):
if all([nzbid is not None, IssueID is not None, oneoff is False]):
# --- this causes any possible snatch to get marked as a Failed download when doing a one-off search...
#try:
# # only nzb providers will have a filen, try it and pass exception
@ -2247,7 +2257,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if rcheck == "fail":
if mylar.FAILED_DOWNLOAD_HANDLING:
logger.error('Unable to send torrent to client. Assuming incomplete link - sending to Failed Handler and continuing search.')
if IssueID is None:
if any([oneoff is True, IssueID is None]):
logger.fdebug('One-off mode was initiated - Failed Download handling for : ' + ComicName + ' #' + str(IssueNumber))
comicinfo = {"ComicName": ComicName,
"IssueNumber": IssueNumber}
@ -2538,11 +2548,11 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if alt_nzbname is None or alt_nzbname == '':
logger.fdebug("Found matching comic...preparing to send to Updater with IssueID: " + str(IssueID) + " and nzbname: " + str(nzbname))
if '[RSS]' in tmpprov: tmpprov = re.sub('\[RSS\]', '', tmpprov).strip()
updater.nzblog(IssueID, nzbname, ComicName, SARC=None, IssueArcID=None, id=nzbid, prov=tmpprov)
updater.nzblog(IssueID, nzbname, ComicName, SARC=None, IssueArcID=None, id=nzbid, prov=tmpprov, oneoff=oneoff)
else:
logger.fdebug("Found matching comic...preparing to send to Updater with IssueID: " + str(IssueID) + " and nzbname: " + str(nzbname) + ' [' + alt_nzbname + ']')
if '[RSS]' in tmpprov: tmpprov = re.sub('\[RSS\]', '', tmpprov).strip()
updater.nzblog(IssueID, nzbname, ComicName, SARC=None, IssueArcID=None, id=nzbid, prov=tmpprov, alt_nzbname=alt_nzbname)
updater.nzblog(IssueID, nzbname, ComicName, SARC=None, IssueArcID=None, id=nzbid, prov=tmpprov, alt_nzbname=alt_nzbname, oneoff=oneoff)
#send out notifications for on snatch after the updater incase notification fails (it would bugger up the updater/pp scripts)
notify_snatch(nzbname, sent_to, helpers.filesafe(modcomicname), comyear, IssueNumber, nzbprov)
mylar.TMP_PROV = nzbprov

View File

@ -74,6 +74,7 @@ class TorrentClient(object):
file_path = f.path
torrent_files.append(file_path)
torrent_info = {
'hash': torrent.info_hash,
'name': torrent.name,
@ -102,6 +103,14 @@ class TorrentClient(object):
if not torrent:
return False
#we can cherrypick the torrents here if required and if it's a pack (0day instance)
#torrent.get_files() will return list of files in torrent
#f.set_priority(0,1,2)
for f in torrent.get_files():
logger.info('torrent_get_files: %s' % f)
# f.set_priority(0) #set them to not download just to see if this works...
#torrent.updated_priorities()
if mylar.RTORRENT_LABEL:
torrent.set_custom(1, mylar.RTORRENT_LABEL)
logger.info('Setting label for torrent to : ' + mylar.RTORRENT_LABEL)
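#A minimal sketch of the cherry-picking idea described in the comments above, using only the
#calls those comments name (get_files / set_priority / updated_priorities). This is illustrative
#only -- the commit itself just logs the file list and leaves the priority calls disabled:
#
#    for f in torrent.get_files():
#        if f.path.endswith(('.cbr', '.cbz')):
#            f.set_priority(1)    #1 = normal download
#        else:
#            f.set_priority(0)    #0 = skip this file within the pack
#    torrent.updated_priorities()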

View File

@ -599,7 +599,7 @@ def no_searchresults(ComicID):
"LatestIssue": "Error"}
myDB.upsert("comics", newValue, controlValue)
def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, prov=None, alt_nzbname=None):
def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, prov=None, alt_nzbname=None, oneoff=False):
myDB = db.DBConnection()
newValue = {'NZBName': NZBName}
@ -609,6 +609,10 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, pro
IssueID = 'S' + str(IssueArcID)
newValue['SARC'] = SARC
if oneoff is True:
logger.fdebug('One-Off download detected when updating - crossing the t\'s and dotting the i\'s so things work...')
newValue['OneOff'] = True
if IssueID is None or IssueID == 'None':
#if IssueID is None, it's a one-off download from the pull-list.
#give it a generic ID above the last one so it doesn't throw an error later.
@ -644,7 +648,7 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None, id=None, pro
myDB.upsert("nzblog", newValue, controlValue)
def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None, IssueArcID=None, module=None, hash=None, crc=None):
def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None, IssueArcID=None, module=None, hash=None, crc=None, comicname=None, issuenumber=None, pullinfo=None):
# When doing a Force Search (Wanted tab), the resulting search calls this to update.
# this is all redundant code that forceRescan already does.
@ -661,22 +665,35 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
logger.fdebug(module + ' comicid: ' + str(ComicID))
logger.fdebug(module + ' issueid: ' + str(IssueID))
if mode != 'story_arc':
comic = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
ComicName = comic['ComicName']
if mode == 'want_ann':
issue = myDB.selectone('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
if ComicName != issue['ReleaseComicName'] + ' Annual':
ComicName = issue['ReleaseComicName']
modcomicname = True
else:
issue = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
CYear = issue['IssueDate'][:4]
if mode != 'pullwant':
if mode != 'story_arc':
comic = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
ComicName = comic['ComicName']
if mode == 'want_ann':
issue = myDB.selectone('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
if ComicName != issue['ReleaseComicName'] + ' Annual':
ComicName = issue['ReleaseComicName']
modcomicname = True
else:
issue = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
CYear = issue['IssueDate'][:4]
IssueNum = issue['Issue_Number']
else:
issue = myDB.selectone('SELECT * FROM readinglist WHERE IssueArcID=?', [IssueArcID]).fetchone()
ComicName = issue['ComicName']
CYear = issue['IssueYEAR']
IssueNum = issue['IssueNumber']
else:
issue = myDB.selectone('SELECT * FROM readinglist WHERE IssueArcID=?', [IssueArcID]).fetchone()
ComicName = issue['ComicName']
CYear = issue['IssueYEAR']
oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [IssueID]).fetchone()
if oneinfo is None:
ComicName = comicname
IssueNum = issuenumber
onefail = True
else:
ComicName = oneinfo['COMIC']
IssueNum = oneinfo['ISSUE']
onefail = False
if down is None:
# update the status to Snatched (so it won't keep on re-downloading!)
@ -701,7 +718,8 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
myDB.upsert("annuals", newValue, controlValue)
else:
controlValue = {"IssueID": IssueID}
myDB.upsert("issues", newValue, controlValue)
if mode != 'pullwant':
myDB.upsert("issues", newValue, controlValue)
# update the snatched DB
snatchedupdate = {"IssueID": IssueID,
@ -718,7 +736,10 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
"Status": "Snatched",
"Hash": hash
}
else:
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
elif mode != 'pullwant':
if modcomicname:
IssueNum = issue['Issue_Number']
else:
@ -734,18 +755,43 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
"Status": "Snatched",
"Hash": hash
}
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
#this will update the weeklypull list immediately after sntaching to reflect the new status.
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
else:
#updating the snatched table with a one-off is a bit difficult due to the lack of complete information in some instances
#i.e. alt_pull 2 not populated yet; the alt_pull 0 method in general doesn't have enough info....
newsnatchValues = {"ComicName": ComicName,
"ComicID": ComicID,
"IssueID": IssueID,
"Issue_Number": IssueNum,
"DateAdded": helpers.now(),
"Status": "Snatched",
"Hash": hash
}
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
#this will update the weeklypull list immediately after snatching to reflect the new status.
#-is ugly, should be linked directly to other table (IssueID should be populated in weekly pull at this point hopefully).
chkit = myDB.selectone("SELECT * FROM weekly WHERE ComicID=? AND IssueID=?", [ComicID, IssueID]).fetchone()
if chkit is not None:
comicname = chkit['COMIC']
issue = chkit['ISSUE']
ctlVal = {"ComicID": ComicID,
"IssueID": IssueID}
newVal = {"Status": "Snatched"}
myDB.upsert("weekly", newVal, ctlVal)
myDB.upsert("weekly", newValue, ctlVal)
newValue['IssueNumber'] = issue
newValue['ComicName'] = comicname
newValue['Status'] = "Snatched"
if pullinfo is not None:
newValue['weeknumber'] = pullinfo['weeknumber']
newValue['year'] = pullinfo['year']
myDB.upsert("oneoffhistory", newValue, ctlVal)
logger.info(module + ' Updated the status (Snatched) complete for ' + ComicName + ' Issue: ' + str(IssueNum))
else:
@ -756,15 +802,10 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
logger.info(module + ' Setting status to Downloaded in history.')
downstatus = 'Downloaded'
if mode == 'want_ann':
if modcomicname:
IssueNum = issue['Issue_Number']
else:
IssueNum = "Annual " + issue['Issue_Number']
if not modcomicname:
IssueNum = "Annual " + IssueNum
elif mode == 'story_arc':
IssueNum = issue['IssueNumber']
IssueID = IssueArcID
else:
IssueNum = issue['Issue_Number']
snatchedupdate = {"IssueID": IssueID,
"Status": downstatus,
@ -784,7 +825,7 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
nValue = {"Status": "Downloaded"}
myDB.upsert("readinglist", nValue, cValue)
else:
elif mode != 'pullwant':
controlValue = {"IssueID": IssueID}
newValue = {"Status": "Downloaded"}
if mode == 'want_ann':
@ -796,12 +837,22 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
chkit = myDB.selectone("SELECT * FROM weekly WHERE ComicID=? AND IssueID=? AND Status='Snatched'", [ComicID, IssueID]).fetchone()
if chkit is not None:
comicname = chkit['COMIC']
issue = chkit['ISSUE']
ctlVal = {"ComicID": ComicID,
"IssueID": IssueID}
newVal = {"Status": "Downloaded"}
myDB.upsert("weekly", newVal, ctlVal)
newVal['IssueNumber'] = issue
newVal['ComicName'] = comicname
newVal['Status'] = "Downloaded"
if pullinfo is not None:
newVal['weeknumber'] = pullinfo['weeknumber']
newVal['year'] = pullinfo['year']
myDB.upsert("oneoffhistory", newVal, ctlVal)
logger.info(module + ' Updating Status (' + downstatus + ') now complete for ' + ComicName + ' issue: ' + IssueNum)
return

View File

@ -81,14 +81,13 @@ class utorrentclient(object):
# (to-do) verify the hash in order to ensure it's loaded here
if str(r.status_code) == '200':
logger.info('Successfully added torrent to uTorrent client.')
hash = self.calculate_torrent_hash(data=tordata)
if mylar.UTORRENT_LABEL:
try:
hash = self.calculate_torrent_hash(data=tordata)
self.setlabel(hash)
except:
logger.warn('Unable to set label for torrent.')
return 'pass'
return hash
else:
return 'fail'

View File

@ -1074,6 +1074,9 @@ class WebInterface(object):
#updater.forceRescan(mi['ComicID'])
issuestoArchive.append(IssueID)
elif action == 'Wanted' or action == 'Retry':
if mi['Status'] == 'Wanted':
logger.fdebug('Issue already set to Wanted status - no need to change it again.')
continue
if action == 'Retry': newaction = 'Wanted'
logger.fdebug(u"Marking %s %s as %s" % (comicname, mi['Issue_Number'], newaction))
issuesToAdd.append(IssueID)
@ -1329,7 +1332,7 @@ class WebInterface(object):
threading.Thread(target=self.queueissue, kwargs=kwargs).start()
queueit.exposed = True
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None):
def queueissue(self, mode, ComicName=None, ComicID=None, ComicYear=None, ComicIssue=None, IssueID=None, new=False, redirect=None, SeriesYear=None, SARC=None, IssueArcID=None, manualsearch=None, Publisher=None, pullinfo=None, pullweek=None, pullyear=None):
logger.fdebug('ComicID:' + str(ComicID))
logger.fdebug('mode:' + str(mode))
now = datetime.datetime.now()
@ -1383,25 +1386,26 @@ class WebInterface(object):
controlValueDict = {"IssueArcID": IssueArcID}
newStatus = {"Status": "Snatched"}
myDB.upsert("readinglist", newStatus, controlValueDict)
#raise cherrypy.HTTPRedirect("readlist")
return foundcom
elif ComicID is None and mode == 'pullwant':
elif mode == 'pullwant': #and ComicID is None
#this is for marking individual comics from the pullist to be downloaded.
#--comicid & issueid may both be known (or either) at any given point if alt_pull = 2
#because ComicID and IssueID will both be None due to pullist, it's probably
#better to set both to some generic #, and then filter out later...
IssueDate = pullinfo
try:
ComicYear = str(pullinfo)[:4]
ComicYear = IssueDate[:4]
except:
ComicYear = str(now.year)
if Publisher == 'COMICS': Publisher = None
logger.info(u"Marking " + ComicName + " " + ComicIssue + " as wanted...")
foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=IssueDate, IssueID=None, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, allow_packs=False)
foundcom, prov = search.search_init(ComicName=ComicName, IssueNumber=ComicIssue, ComicYear=ComicYear, SeriesYear=None, Publisher=Publisher, IssueDate=IssueDate, StoreDate=IssueDate, IssueID=IssueID, ComicID=ComicID, AlternateSearch=None, mode=mode, UseFuzzy=None, ComicVersion=None, allow_packs=False)
if foundcom['status'] is True:
logger.info(u"Downloaded " + ComicName + " " + ComicIssue)
raise cherrypy.HTTPRedirect("pullist")
#return
logger.info('[ONE-OFF MODE] Successfully Downloaded ' + ComicName + ' ' + ComicIssue)
return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov, hash=foundcom['info']['t_hash'], pullinfo={'weeknumber': pullweek, 'year': pullyear})
return
elif mode == 'want' or mode == 'want_ann' or manualsearch:
cdname = myDB.selectone("SELECT * from comics where ComicID=?", [ComicID]).fetchone()
ComicName_Filesafe = cdname['ComicName_Filesafe']
@ -1644,6 +1648,7 @@ class WebInterface(object):
watchlibrary = helpers.listLibrary()
issueLibrary = helpers.listIssues(weekinfo['weeknumber'], weekinfo['year'])
oneofflist = helpers.listoneoffs(weekinfo['weeknumber'], weekinfo['year'])
for weekly in w_results:
xfound = False
@ -1662,7 +1667,12 @@ class WebInterface(object):
break
else:
haveit = "No"
xlist = [x['Status'] for x in oneofflist if x['IssueID'] == weekly['IssueID']]
if xlist:
haveit = 'OneOff'
tmp_status = xlist[0]
else:
haveit = "No"
linkit = None
if all([weekly['ComicID'] is not None, weekly['ComicID'] != '']) and haveit == 'No':
@ -1720,7 +1730,6 @@ class WebInterface(object):
weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False)
else:
self.manualpull()
if week:
return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo)
else: