mirror of https://github.com/evilhero/mylar
IMP:(#753) Better handling of cbz files when using metatagging option, IMP: Added option for MetaTagging to enable/disable overwriting of existing metadata on cbz files, IMP: Post-Processing will now delete folders that are identical to filenames after successful post-processing, FIX:(#756) Fix for Manual Search for an Issue which resulted in traceback errors upon not finding any matches (Windows), IMP: Added module-name to logging for some modules to help troubleshooting (eventually all will be done), IMP: If a filename contained repeated characters for spaces (ie. ___ or ...), it would fail on post-processing due to the nzbname being off, IMP: Monitoring of CV API will now be persistent on restarts/shutdowns/etc, IMP: Added a maximum CV API global so that it can be adjusted for monitoring more easily should API requirements change, IMP: When Adding new series, will now only mark issues that are actually upcoming as Wanted (based on Store Date)
This commit is contained in:
parent
c510860c46
commit
8b118ed55e
|
@ -625,6 +625,10 @@
|
|||
<div class="row checkbox left clearfix">
|
||||
<input type="checkbox" name="ct_tag_cbl" value="1" ${config['ct_tag_cbl']} /><label>Write ComicBookLover (Cbl) tags (zip comment)</label>
|
||||
</div>
|
||||
<div class="row checkbox left clearfix">
|
||||
<input type="checkbox" name="ct_cbz_overwrite" value="1" ${config['ct_cbz_overwrite']} /><label>Overwrite existing cbz tags (if they exist)</label>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<small><center>If ComicVine API Key specified, will use with ComicTagger</center></small>
|
||||
<small><center>Writing each type of metadata will increase API count respectively</center></small>
|
||||
|
|
|
@ -46,7 +46,7 @@ class PostProcessor(object):
|
|||
FOLDER_NAME = 2
|
||||
FILE_NAME = 3
|
||||
|
||||
def __init__(self, nzb_name, nzb_folder):
|
||||
def __init__(self, nzb_name, nzb_folder, module=None):
|
||||
"""
|
||||
Creates a new post processor with the given file path and optionally an NZB name.
|
||||
|
||||
|
@ -68,6 +68,10 @@ class PostProcessor(object):
|
|||
# name of the NZB that resulted in this folder
|
||||
self.nzb_name = nzb_name
|
||||
self.nzb_folder = nzb_folder
|
||||
if module is not None:
|
||||
self.module = module + '[POST-PROCESSING]'
|
||||
else:
|
||||
self.module = '[POST-PROCESSING]'
|
||||
#self.in_history = False
|
||||
#self.release_group = None
|
||||
#self.is_proper = False
|
||||
|
@ -136,19 +140,20 @@ class PostProcessor(object):
|
|||
|
||||
|
||||
def Process(self):
|
||||
module = self.module
|
||||
self._log("nzb name: " + str(self.nzb_name))
|
||||
self._log("nzb folder: " + str(self.nzb_folder))
|
||||
logger.fdebug("nzb name: " + str(self.nzb_name))
|
||||
logger.fdebug("nzb folder: " + str(self.nzb_folder))
|
||||
logger.fdebug(module + ' nzb name: ' + str(self.nzb_name))
|
||||
logger.fdebug(module + ' nzb folder: ' + str(self.nzb_folder))
|
||||
if mylar.USE_SABNZBD==0:
|
||||
logger.fdebug("Not using SABnzbd")
|
||||
logger.fdebug(module + ' Not using SABnzbd')
|
||||
elif mylar.USE_SABNZBD != 0 and self.nzb_name == 'Manual Run':
|
||||
logger.fdebug('Not using SABnzbd : Manual Run')
|
||||
logger.fdebug(module + ' Not using SABnzbd : Manual Run')
|
||||
else:
|
||||
# if the SAB Directory option is enabled, let's use that folder name and append the jobname.
|
||||
if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY is not 'None' and len(mylar.SAB_DIRECTORY) > 4:
|
||||
self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
logger.fdebug('SABnzbd Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
logger.fdebug(module + ' SABnzbd Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
|
||||
# -- start. not used.
|
||||
#query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
|
||||
|
@ -173,18 +178,18 @@ class PostProcessor(object):
|
|||
|
||||
if mylar.USE_NZBGET==1:
|
||||
if self.nzb_name != 'Manual Run':
|
||||
logger.fdebug("Using NZBGET")
|
||||
logger.fdebug("NZB name as passed from NZBGet: " + self.nzb_name)
|
||||
logger.fdebug(module + ' Using NZBGET')
|
||||
logger.fdebug(module + ' NZB name as passed from NZBGet: ' + self.nzb_name)
|
||||
# if the NZBGet Directory option is enabled, let's use that folder name and append the jobname.
|
||||
if self.nzb_name == 'Manual Run':
|
||||
logger.fdebug('Manual Run Post-Processing enabled.')
|
||||
logger.fdebug(module + ' Manual Run Post-Processing enabled.')
|
||||
elif mylar.NZBGET_DIRECTORY is not None and mylar.NZBGET_DIRECTORY is not 'None' and len(mylar.NZBGET_DIRECTORY) > 4:
|
||||
self.nzb_folder = os.path.join(mylar.NZBGET_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
|
||||
logger.fdebug('NZBGET Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
logger.fdebug(module + ' NZBGET Download folder option enabled. Directory set to : ' + self.nzb_folder)
|
||||
myDB = db.DBConnection()
|
||||
|
||||
if self.nzb_name == 'Manual Run':
|
||||
logger.fdebug ("manual run initiated")
|
||||
logger.fdebug (module + ' Manual Run initiated')
|
||||
#Manual postprocessing on a folder.
|
||||
#use the nzb_folder to determine every file
|
||||
#walk the dir,
|
||||
|
@ -194,7 +199,7 @@ class PostProcessor(object):
|
|||
comicseries = myDB.select("SELECT * FROM comics")
|
||||
manual_list = []
|
||||
if comicseries is None:
|
||||
logger.error(u"No Series in Watchlist - aborting Manual Post Processing. Maybe you should be running Import?")
|
||||
logger.error(module + ' No Series in Watchlist - aborting Manual Post Processing. Maybe you should be running Import?')
|
||||
return
|
||||
else:
|
||||
ccnt=0
|
||||
|
@ -223,7 +228,7 @@ class PostProcessor(object):
|
|||
temploc = re.sub('[\#\']', '', temploc)
|
||||
|
||||
if 'annual' in temploc.lower():
|
||||
logger.info("annual detected.")
|
||||
logger.info(module + ' Annual detected.')
|
||||
annchk = "yes"
|
||||
fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip())
|
||||
issuechk = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'],fcdigit]).fetchone()
|
||||
|
@ -232,7 +237,7 @@ class PostProcessor(object):
|
|||
issuechk = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'],fcdigit]).fetchone()
|
||||
|
||||
if issuechk is None:
|
||||
logger.fdebug("No corresponding issue # found for " + str(cs['ComicID']))
|
||||
logger.fdebug(module + ' No corresponding issue # found for ' + str(cs['ComicID']))
|
||||
else:
|
||||
datematch = "True"
|
||||
if len(watchmatch) >= 1 and tmpfc['ComicYear'] is not None:
|
||||
|
@ -249,32 +254,32 @@ class PostProcessor(object):
|
|||
if issuechk['ReleaseDate'] is not None and issuechk['ReleaseDate'] != '0000-00-00':
|
||||
monthval = issuechk['ReleaseDate']
|
||||
if int(issuechk['ReleaseDate'][:4]) < int(tmpfc['ComicYear']):
|
||||
logger.fdebug(str(issuechk['ReleaseDate']) + ' is before the issue year of ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
|
||||
logger.fdebug(module + ' ' + str(issuechk['ReleaseDate']) + ' is before the issue year of ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
|
||||
datematch = "False"
|
||||
|
||||
else:
|
||||
monthval = issuechk['IssueDate']
|
||||
if int(issuechk['IssueDate'][:4]) < int(tmpfc['ComicYear']):
|
||||
logger.fdebug(str(issuechk['IssueDate']) + ' is before the issue year ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
|
||||
logger.fdebug(module + ' ' + str(issuechk['IssueDate']) + ' is before the issue year ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
|
||||
datematch = "False"
|
||||
|
||||
if int(monthval[5:7]) == 11 or int(monthval[5:7]) == 12:
|
||||
issyr = int(monthval[:4]) + 1
|
||||
logger.fdebug('issyr is ' + str(issyr))
|
||||
logger.fdebug(module + ' IssueYear (issyr) is ' + str(issyr))
|
||||
elif int(monthval[5:7]) == 1 or int(monthval[5:7]) == 2:
|
||||
issyr = int(monthval[:4]) - 1
|
||||
|
||||
|
||||
|
||||
if datematch == "False" and issyr is not None:
|
||||
logger.fdebug(str(issyr) + ' comparing to ' + str(tmpfc['ComicYear']) + ' : rechecking by month-check versus year.')
|
||||
logger.fdebug(module + ' ' + str(issyr) + ' comparing to ' + str(tmpfc['ComicYear']) + ' : rechecking by month-check versus year.')
|
||||
datematch = "True"
|
||||
if int(issyr) != int(tmpfc['ComicYear']):
|
||||
logger.fdebug('[fail] Issue is before the modified issue year of ' + str(issyr))
|
||||
logger.fdebug(module + '[.:FAIL:.] Issue is before the modified issue year of ' + str(issyr))
|
||||
datematch = "False"
|
||||
|
||||
else:
|
||||
logger.info("Found matching issue # " + str(fcdigit) + " for ComicID: " + str(cs['ComicID']) + " / IssueID: " + str(issuechk['IssueID']))
|
||||
logger.info(module + ' Found matching issue # ' + str(fcdigit) + ' for ComicID: ' + str(cs['ComicID']) + ' / IssueID: ' + str(issuechk['IssueID']))
|
||||
|
||||
if datematch == "True":
|
||||
manual_list.append({"ComicLocation": tmpfc['ComicLocation'],
|
||||
|
@ -283,11 +288,11 @@ class PostProcessor(object):
|
|||
"IssueNumber": issuechk['Issue_Number'],
|
||||
"ComicName": cs['ComicName']})
|
||||
else:
|
||||
logger.fdebug('Incorrect series - not populating..continuing post-processing')
|
||||
logger.fdebug(module + ' Incorrect series - not populating..continuing post-processing')
|
||||
#ccnt+=1
|
||||
|
||||
fn+=1
|
||||
logger.fdebug("There are " + str(len(manual_list)) + " files found that match on your watchlist, " + str(nm) + " do not match anything and will be ignored.")
|
||||
logger.fdebug(module + ' There are ' + str(len(manual_list)) + ' files found that match on your watchlist, ' + str(nm) + ' do not match anything and will be ignored.')
|
||||
|
||||
|
||||
else:
|
||||
|
@ -305,7 +310,7 @@ class PostProcessor(object):
|
|||
nzbname = re.sub('[\,\:\?]', '', str(nzbname))
|
||||
nzbname = re.sub('[\&]', 'and', str(nzbname))
|
||||
|
||||
logger.fdebug("After conversions, nzbname is : " + str(nzbname))
|
||||
logger.fdebug(module + ' After conversions, nzbname is : ' + str(nzbname))
|
||||
# if mylar.USE_NZBGET==1:
|
||||
# nzbname=self.nzb_name
|
||||
self._log("nzbname: " + str(nzbname))
|
||||
|
@ -314,39 +319,39 @@ class PostProcessor(object):
|
|||
|
||||
if nzbiss is None:
|
||||
self._log("Failure - could not initially locate nzbfile in my database to rename.")
|
||||
logger.fdebug("Failure - could not locate nzbfile initially.")
|
||||
logger.fdebug(module + ' Failure - could not locate nzbfile initially')
|
||||
# if failed on spaces, change it all to decimals and try again.
|
||||
nzbname = re.sub('_', '.', str(nzbname))
|
||||
self._log("trying again with this nzbname: " + str(nzbname))
|
||||
logger.fdebug("trying again with nzbname of : " + str(nzbname))
|
||||
logger.fdebug(module + ' Trying to locate nzbfile again with nzbname of : ' + str(nzbname))
|
||||
nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()
|
||||
if nzbiss is None:
|
||||
logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
|
||||
logger.error(module + ' Unable to locate downloaded file to rename. PostProcessing aborted.')
|
||||
return
|
||||
else:
|
||||
self._log("I corrected and found the nzb as : " + str(nzbname))
|
||||
logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname))
|
||||
logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname))
|
||||
issueid = nzbiss['IssueID']
|
||||
else:
|
||||
issueid = nzbiss['IssueID']
|
||||
logger.fdebug("issueid:" + str(issueid))
|
||||
logger.fdebug(module + ' Issueid: ' + str(issueid))
|
||||
sarc = nzbiss['SARC']
|
||||
#use issueid to get publisher, series, year, issue number
|
||||
|
||||
annchk = "no"
|
||||
if 'annual' in nzbname.lower():
|
||||
logger.info("annual detected.")
|
||||
logger.info(module + ' Annual detected.')
|
||||
annchk = "yes"
|
||||
issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone()
|
||||
else:
|
||||
issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone()
|
||||
|
||||
if issuenzb is not None:
|
||||
logger.info("issuenzb found.")
|
||||
logger.info(module + ' issuenzb found.')
|
||||
if helpers.is_number(issueid):
|
||||
sandwich = int(issuenzb['IssueID'])
|
||||
else:
|
||||
logger.info("issuenzb not found.")
|
||||
logger.info(module + ' issuenzb not found.')
|
||||
#if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume
|
||||
#using GCD data. Set sandwich to 1 so it will bypass and continue post-processing.
|
||||
if 'S' in issueid:
|
||||
|
@ -363,7 +368,7 @@ class PostProcessor(object):
|
|||
# At this point, let's just drop it into the Comic Location folder and forget about it..
|
||||
if 'S' in sandwich:
|
||||
self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
|
||||
logger.info("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
|
||||
logger.info(module + 'One-off STORYARC mode enabled for Post-Processing for ' + str(sarc))
|
||||
if mylar.STORYARCDIR:
|
||||
storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", sarc)
|
||||
self._log("StoryArc Directory set to : " + storyarcd)
|
||||
|
@ -372,7 +377,7 @@ class PostProcessor(object):
|
|||
|
||||
else:
|
||||
self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.")
|
||||
logger.info("One-off mode enabled for Post-Processing. Will move into Grab-bag directory.")
|
||||
logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.')
|
||||
self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR)
|
||||
|
||||
for root, dirnames, filenames in os.walk(self.nzb_folder):
|
||||
|
@ -392,15 +397,15 @@ class PostProcessor(object):
|
|||
else:
|
||||
grdst = mylar.DESTINATION_DIR
|
||||
|
||||
filechecker.validateAndCreateDirectory(grdst, True)
|
||||
filechecker.validateAndCreateDirectory(grdst, True, module=module)
|
||||
|
||||
if 'S' in sandwich:
|
||||
#if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
|
||||
if mylar.READ2FILENAME:
|
||||
issuearcid = re.sub('S', '', issueid)
|
||||
logger.fdebug('issuearcid:' + str(issuearcid))
|
||||
logger.fdebug(module + ' issuearcid:' + str(issuearcid))
|
||||
arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?",[issuearcid]).fetchone()
|
||||
logger.fdebug('readingorder#: ' + str(arcdata['ReadingOrder']))
|
||||
logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder']))
|
||||
if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder'])
|
||||
elif int(arcdata['ReadingOrder']) > 10 and int(arcdata['ReadingOrder']) < 99: readord = "0" + str(arcdata['ReadingOrder'])
|
||||
else: readord = str(arcdata['ReadingOrder'])
|
||||
|
@ -412,47 +417,47 @@ class PostProcessor(object):
|
|||
grab_dst = os.path.join(grdst, ofilename)
|
||||
|
||||
self._log("Destination Path : " + grab_dst)
|
||||
logger.info("Destination Path : " + grab_dst)
|
||||
logger.info(module + ' Destination Path : ' + grab_dst)
|
||||
grab_src = os.path.join(self.nzb_folder, ofilename)
|
||||
self._log("Source Path : " + grab_src)
|
||||
logger.info("Source Path : " + grab_src)
|
||||
logger.info(module + ' Source Path : ' + grab_src)
|
||||
|
||||
logger.info("Moving " + str(ofilename) + " into directory : " + str(grdst))
|
||||
logger.info(module + ' Moving ' + str(ofilename) + ' into directory : ' + str(grdst))
|
||||
|
||||
try:
|
||||
shutil.move(grab_src, grab_dst)
|
||||
except (OSError, IOError):
|
||||
self._log("Failed to move directory - check directories and manually re-run.")
|
||||
logger.debug("Failed to move directory - check directories and manually re-run.")
|
||||
logger.debug(module + ' Failed to move directory - check directories and manually re-run.')
|
||||
return
|
||||
#tidyup old path
|
||||
try:
|
||||
shutil.rmtree(self.nzb_folder)
|
||||
except (OSError, IOError):
|
||||
self._log("Failed to remove temporary directory.")
|
||||
logger.debug("Failed to remove temporary directory - check directory and manually re-run.")
|
||||
logger.debug(module + ' Failed to remove temporary directory - check directory and manually re-run.')
|
||||
return
|
||||
|
||||
logger.debug("Removed temporary directory : " + str(self.nzb_folder))
|
||||
logger.debug(module + ' Removed temporary directory : ' + str(self.nzb_folder))
|
||||
self._log("Removed temporary directory : " + self.nzb_folder)
|
||||
#delete entry from nzblog table
|
||||
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
|
||||
|
||||
if 'S' in issueid:
|
||||
issuearcid = re.sub('S', '', issueid)
|
||||
logger.info("IssueArcID is : " + str(issuearcid))
|
||||
logger.info(module + ' IssueArcID is : ' + str(issuearcid))
|
||||
ctrlVal = {"IssueArcID": issuearcid}
|
||||
newVal = {"Status": "Downloaded",
|
||||
"Location": grab_dst }
|
||||
myDB.upsert("readinglist",newVal,ctrlVal)
|
||||
logger.info("updated status to Downloaded")
|
||||
logger.info(module + ' Updated status to Downloaded')
|
||||
return self.log
|
||||
|
||||
|
||||
if self.nzb_name == 'Manual Run':
|
||||
#loop through the hits here.
|
||||
if len(manual_list) == '0':
|
||||
logger.info("No hits ... breakout.")
|
||||
logger.info(module + ' No matches for Manual Run ... exiting.')
|
||||
return
|
||||
|
||||
for ml in manual_list:
|
||||
|
@ -460,7 +465,7 @@ class PostProcessor(object):
|
|||
issueid = ml['IssueID']
|
||||
issuenumOG = ml['IssueNumber']
|
||||
self.Process_next(comicid,issueid,issuenumOG,ml)
|
||||
logger.info('Manual post-processing completed.')
|
||||
logger.info(module + ' Manual post-processing completed.')
|
||||
return
|
||||
else:
|
||||
comicid = issuenzb['ComicID']
|
||||
|
@ -468,6 +473,7 @@ class PostProcessor(object):
|
|||
return self.Process_next(comicid,issueid,issuenumOG)
|
||||
|
||||
def Process_next(self,comicid,issueid,issuenumOG,ml=None):
|
||||
module = self.module
|
||||
annchk = "no"
|
||||
extensions = ('.cbr', '.cbz')
|
||||
snatchedtorrent = False
|
||||
|
@ -477,12 +483,12 @@ class PostProcessor(object):
|
|||
if ml is not None and mylar.SNATCHEDTORRENT_NOTIFY:
|
||||
snatchnzb = myDB.selectone("SELECT * from snatched WHERE IssueID=? AND ComicID=? AND (provider=? OR provider=?) AND Status='Snatched'", [issueid,comicid,'KAT','CBT']).fetchone()
|
||||
if snatchnzb is None:
|
||||
logger.fdebug('Was not downloaded with Mylar and the usage of torrents. Disabling torrent manual post-processing completion notification.')
|
||||
logger.fdebug(module + ' Was not downloaded with Mylar and the usage of torrents. Disabling torrent manual post-processing completion notification.')
|
||||
else:
|
||||
logger.fdebug('Was downloaded from ' + snatchnzb['Provider'] + '. Enabling torrent manual post-processing completion notification.')
|
||||
logger.fdebug(module + ' Was downloaded from ' + snatchnzb['Provider'] + '. Enabling torrent manual post-processing completion notification.')
|
||||
snatchedtorrent = True
|
||||
logger.fdebug('issueid: ' + str(issueid))
|
||||
logger.fdebug('issuenumOG: ' + str(issuenumOG))
|
||||
logger.fdebug(module + ' issueid: ' + str(issueid))
|
||||
logger.fdebug(module + ' issuenumOG: ' + str(issuenumOG))
|
||||
if issuenzb is None:
|
||||
issuenzb = myDB.selectone("SELECT * from annuals WHERE issueid=? and comicid=?", [issueid,comicid]).fetchone()
|
||||
annchk = "yes"
|
||||
|
@ -514,7 +520,7 @@ class PostProcessor(object):
|
|||
issdec = int(iss_decval)
|
||||
issueno = str(iss)
|
||||
self._log("Issue Number: " + str(issueno))
|
||||
logger.fdebug("Issue Number: " + str(issueno))
|
||||
logger.fdebug(module + 'Issue Number: ' + str(issueno))
|
||||
else:
|
||||
if len(iss_decval) == 1:
|
||||
iss = iss_b4dec + "." + iss_decval
|
||||
|
@ -524,7 +530,7 @@ class PostProcessor(object):
|
|||
issdec = int(iss_decval.rstrip('0')) * 10
|
||||
issueno = iss_b4dec
|
||||
self._log("Issue Number: " + str(iss))
|
||||
logger.fdebug("Issue Number: " + str(iss))
|
||||
logger.fdebug(module + ' Issue Number: ' + str(iss))
|
||||
else:
|
||||
iss = issuenum
|
||||
issueno = str(iss)
|
||||
|
@ -537,7 +543,7 @@ class PostProcessor(object):
|
|||
elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
|
||||
elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"
|
||||
|
||||
logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))
|
||||
logger.fdebug(module + ' Zero Suppression set to : ' + str(mylar.ZERO_LEVEL_N))
|
||||
|
||||
if str(len(issueno)) > 1:
|
||||
if int(issueno) < 0:
|
||||
|
@ -588,29 +594,29 @@ class PostProcessor(object):
|
|||
|
||||
if annchk == "yes":
|
||||
self._log("Annual detected.")
|
||||
logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
|
||||
logger.fdebug(module + ' Pretty Comic Issue is : ' + str(prettycomiss))
|
||||
issueyear = issuenzb['IssueDate'][:4]
|
||||
self._log("Issue Year: " + str(issueyear))
|
||||
logger.fdebug("Issue Year : " + str(issueyear))
|
||||
logger.fdebug(module + ' Issue Year : ' + str(issueyear))
|
||||
month = issuenzb['IssueDate'][5:7].replace('-','').strip()
|
||||
month_name = helpers.fullmonth(month)
|
||||
# comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
|
||||
publisher = comicnzb['ComicPublisher']
|
||||
self._log("Publisher: " + publisher)
|
||||
logger.fdebug("Publisher: " + str(publisher))
|
||||
logger.fdebug(module + ' Publisher: ' + str(publisher))
|
||||
#we need to un-unicode this to make sure we can write the filenames properly for spec.chars
|
||||
series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
|
||||
self._log("Series: " + series)
|
||||
logger.fdebug("Series: " + str(series))
|
||||
logger.fdebug(module + ' Series: ' + str(series))
|
||||
seriesyear = comicnzb['ComicYear']
|
||||
self._log("Year: " + seriesyear)
|
||||
logger.fdebug("Year: " + str(seriesyear))
|
||||
logger.fdebug(module + ' Year: ' + str(seriesyear))
|
||||
comlocation = comicnzb['ComicLocation']
|
||||
self._log("Comic Location: " + comlocation)
|
||||
logger.fdebug("Comic Location: " + str(comlocation))
|
||||
logger.fdebug(module + ' Comic Location: ' + str(comlocation))
|
||||
comversion = comicnzb['ComicVersion']
|
||||
self._log("Comic Version: " + str(comversion))
|
||||
logger.fdebug("Comic Version: " + str(comversion))
|
||||
logger.fdebug(module + ' Comic Version: ' + str(comversion))
|
||||
if comversion is None:
|
||||
comversion = 'None'
|
||||
#if comversion is None, remove it so it doesn't populate with 'None'
|
||||
|
@ -619,8 +625,8 @@ class PostProcessor(object):
|
|||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
self._log("No version # found for series - tag will not be available for renaming.")
|
||||
logger.fdebug("No version # found for series, removing from filename")
|
||||
logger.fdebug("new format is now: " + str(chunk_file_format))
|
||||
logger.fdebug(module + ' No version # found for series, removing from filename')
|
||||
logger.fdebug(module + ' New format is now: ' + str(chunk_file_format))
|
||||
else:
|
||||
chunk_file_format = mylar.FILE_FORMAT
|
||||
|
||||
|
@ -628,16 +634,16 @@ class PostProcessor(object):
|
|||
chunk_f_f = re.sub('\$Annual','',chunk_file_format)
|
||||
chunk_f = re.compile(r'\s+')
|
||||
chunk_file_format = chunk_f.sub(' ', chunk_f_f)
|
||||
logger.fdebug('not an annual - removing from filename paramaters')
|
||||
logger.fdebug('new format: ' + str(chunk_file_format))
|
||||
logger.fdebug(module + ' Not an annual - removing from filename paramaters')
|
||||
logger.fdebug(module + ' New format: ' + str(chunk_file_format))
|
||||
|
||||
else:
|
||||
logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
|
||||
logger.fdebug(module + ' Chunk_file_format is: ' + str(chunk_file_format))
|
||||
if '$Annual' not in chunk_file_format:
|
||||
#if it's an annual, but $Annual isn't specified in file_format, we need to
|
||||
#force it in there, by default in the format of $Annual $Issue
|
||||
prettycomiss = "Annual " + str(prettycomiss)
|
||||
logger.fdebug('prettycomiss: ' + str(prettycomiss))
|
||||
logger.fdebug(module + ' prettycomiss: ' + str(prettycomiss))
|
||||
|
||||
|
||||
ofilename = None
|
||||
|
@ -649,7 +655,7 @@ class PostProcessor(object):
|
|||
#tag the meta.
|
||||
if mylar.ENABLE_META:
|
||||
self._log("Metatagging enabled - proceeding...")
|
||||
logger.fdebug("Metatagging enabled - proceeding...")
|
||||
logger.fdebug(module + ' Metatagging enabled - proceeding...')
|
||||
pcheck = "pass"
|
||||
try:
|
||||
import cmtagmylar
|
||||
|
@ -659,21 +665,23 @@ class PostProcessor(object):
|
|||
pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid, manual="yes", filename=ml['ComicLocation'])
|
||||
|
||||
except ImportError:
|
||||
logger.fdebug("comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
|
||||
logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
|
||||
logger.fdebug(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/')
|
||||
logger.fdebug(module + ' continuing with PostProcessing, but I am not using metadata.')
|
||||
pcheck = "fail"
|
||||
|
||||
if pcheck == "fail":
|
||||
self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
|
||||
logger.fdebug("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
|
||||
logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...')
|
||||
#we need to set this to the cbz file since not doing it will result in nothing getting moved.
|
||||
#not sure how to do this atm
|
||||
elif pcheck == "unrar error":
|
||||
self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
|
||||
logger.error("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
|
||||
logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying a different copy.')
|
||||
return self.log
|
||||
else:
|
||||
otofilename = pcheck
|
||||
self._log("Sucessfully wrote metadata to .cbz - Continuing..")
|
||||
logger.fdebug("Sucessfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
|
||||
logger.info(module + ' Sucessfully wrote metadata to .cbz (' + os.path.split(otofilename)[1] + ') - Continuing..')
|
||||
#Run Pre-script
|
||||
|
||||
if mylar.ENABLE_PRE_SCRIPTS:
|
||||
|
@ -723,33 +731,33 @@ class PostProcessor(object):
|
|||
path, ext = os.path.splitext(ofilename)
|
||||
|
||||
if odir is None:
|
||||
logger.fdebug('no root folder set.')
|
||||
logger.fdebug(module + ' No root folder set.')
|
||||
odir = self.nzb_folder
|
||||
logger.fdebug('odir: ' + str(odir))
|
||||
logger.fdebug('ofilename: ' + str(ofilename))
|
||||
logger.fdebug(module + ' odir: ' + str(odir))
|
||||
logger.fdebug(module + ' ofilename: ' + str(ofilename))
|
||||
|
||||
else:
|
||||
if pcheck == "fail":
|
||||
otofilename = ml['ComicLocation']
|
||||
logger.fdebug('otofilename:' + str(otofilename))
|
||||
logger.fdebug(module + ' otofilename:' + str(otofilename))
|
||||
odir, ofilename = os.path.split(otofilename)
|
||||
logger.fdebug('odir: ' + str(odir))
|
||||
logger.fdebug('ofilename: ' + str(ofilename))
|
||||
logger.fdebug(module + ' odir: ' + str(odir))
|
||||
logger.fdebug(module + ' ofilename: ' + str(ofilename))
|
||||
path, ext = os.path.splitext(ofilename)
|
||||
logger.fdebug('path: ' + str(path))
|
||||
logger.fdebug('ext:' + str(ext))
|
||||
logger.fdebug(module + ' path: ' + str(path))
|
||||
logger.fdebug(module + ' ext:' + str(ext))
|
||||
|
||||
if ofilename is None:
|
||||
logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
|
||||
logger.error(module + ' Aborting PostProcessing - the filename does not exist in the location given. Make sure that ' + str(self.nzb_folder) + ' exists and is the correct location.')
|
||||
return
|
||||
self._log("Original Filename: " + ofilename)
|
||||
self._log("Original Extension: " + ext)
|
||||
logger.fdebug("Original Filname: " + str(ofilename))
|
||||
logger.fdebug("Original Extension: " + str(ext))
|
||||
logger.fdebug(module + ' Original Filname: ' + str(ofilename))
|
||||
logger.fdebug(module + ' Original Extension: ' + str(ext))
|
||||
|
||||
if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
|
||||
self._log("Rename Files isn't enabled...keeping original filename.")
|
||||
logger.fdebug("Rename Files isn't enabled - keeping original filename.")
|
||||
logger.fdebug(module + ' Rename Files is not enabled - keeping original filename.')
|
||||
#check if extension is in nzb_name - will screw up otherwise
|
||||
if ofilename.lower().endswith(extensions):
|
||||
nfilename = ofilename[:-4]
|
||||
|
@ -763,35 +771,35 @@ class PostProcessor(object):
|
|||
nfilename = re.sub('[\,\:\?]', '', nfilename)
|
||||
nfilename = re.sub('[\/]', '-', nfilename)
|
||||
self._log("New Filename: " + nfilename)
|
||||
logger.fdebug("New Filename: " + str(nfilename))
|
||||
logger.fdebug(module + ' New Filename: ' + str(nfilename))
|
||||
|
||||
#src = os.path.join(self.nzb_folder, ofilename)
|
||||
src = os.path.join(odir, ofilename)
|
||||
filechecker.validateAndCreateDirectory(comlocation, True)
|
||||
filechecker.validateAndCreateDirectory(comlocation, True, module=module)
|
||||
|
||||
if mylar.LOWERCASE_FILENAMES:
|
||||
dst = (comlocation + "/" + nfilename + ext).lower()
|
||||
dst = os.path.join(comlocation, (nfilename + ext).lower())
|
||||
else:
|
||||
dst = comlocation + "/" + nfilename + ext.lower()
|
||||
dst = os.path.join(comlocation, (nfilename + ext.lower()))
|
||||
self._log("Source:" + src)
|
||||
self._log("Destination:" + dst)
|
||||
logger.fdebug("Source: " + str(src))
|
||||
logger.fdebug("Destination: " + str(dst))
|
||||
logger.fdebug(module + ' Source: ' + str(src))
|
||||
logger.fdebug(module + ' Destination: ' + str(dst))
|
||||
|
||||
if ml is None:
|
||||
#downtype = for use with updater on history table to set status to 'Downloaded'
|
||||
downtype = 'True'
|
||||
#non-manual run moving/deleting...
|
||||
logger.fdebug('self.nzb_folder: ' + self.nzb_folder)
|
||||
logger.fdebug('odir: ' + str(odir))
|
||||
logger.fdebug('ofilename:' + str(ofilename))
|
||||
logger.fdebug('nfilename:' + str(nfilename + ext))
|
||||
logger.fdebug(module + ' self.nzb_folder: ' + self.nzb_folder)
|
||||
logger.fdebug(module + ' odir: ' + str(odir))
|
||||
logger.fdebug(module + ' ofilename:' + str(ofilename))
|
||||
logger.fdebug(module + ' nfilename:' + str(nfilename + ext))
|
||||
if mylar.RENAME_FILES:
|
||||
if str(ofilename) != str(nfilename + ext):
|
||||
logger.fdebug("Renaming " + os.path.join(odir, str(ofilename)) + " ..to.. " + os.path.join(odir,str(nfilename + ext)))
|
||||
logger.fdebug(module + ' Renaming ' + os.path.join(odir, str(ofilename)) + ' ..to.. ' + os.path.join(odir,str(nfilename + ext)))
|
||||
os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir,str(nfilename + ext)))
|
||||
else:
|
||||
logger.fdebug('filename is identical as original, not renaming.')
|
||||
logger.fdebug(module + ' Filename is identical as original, not renaming.')
|
||||
|
||||
#src = os.path.join(self.nzb_folder, str(nfilename + ext))
|
||||
src = os.path.join(odir, str(nfilename + ext))
|
||||
|
@ -800,6 +808,8 @@ class PostProcessor(object):
|
|||
except (OSError, IOError):
|
||||
self._log("Failed to move directory - check directories and manually re-run.")
|
||||
self._log("Post-Processing ABORTED.")
|
||||
logger.warn(module + ' Failed to move directory : ' + src + ' to ' + dst + ' - check directory and manually re-run')
|
||||
logger.warn(module + ' Post-Processing ABORTED')
|
||||
return
|
||||
#tidyup old path
|
||||
try:
|
||||
|
@ -807,39 +817,40 @@ class PostProcessor(object):
|
|||
except (OSError, IOError):
|
||||
self._log("Failed to remove temporary directory - check directory and manually re-run.")
|
||||
self._log("Post-Processing ABORTED.")
|
||||
logger.warn(module + ' Failed to remove temporary directory : ' + self.nzb_folder)
|
||||
logger.warn(module + ' Post-Processing ABORTED')
|
||||
return
|
||||
|
||||
self._log("Removed temporary directory : " + str(self.nzb_folder))
|
||||
logger.fdebug(module + ' Removed temporary directory : ' + self.nzb_folder)
|
||||
else:
|
||||
#downtype = for use with updater on history table to set status to 'Post-Processed'
|
||||
downtype = 'PP'
|
||||
#Manual Run, this is the portion.
|
||||
if mylar.RENAME_FILES:
|
||||
if str(ofilename) != str(nfilename + ext):
|
||||
logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) + " ..to.. " + os.path.join(self.nzb_folder,str(nfilename + ext)))
|
||||
logger.fdebug(module + ' Renaming ' + os.path.join(odir, str(ofilename)) + ' ..to.. ' + os.path.join(odir, self.nzb_folder,str(nfilename + ext)))
|
||||
os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir ,str(nfilename + ext)))
|
||||
else:
|
||||
logger.fdebug('filename is identical as original, not renaming.')
|
||||
logger.fdebug(module + ' Filename is identical as original, not renaming.')
|
||||
src = os.path.join(odir, str(nfilename + ext))
|
||||
logger.fdebug('odir rename: ' + os.path.join(odir, str(ofilename)) + ' TO ' + os.path.join(odir, str(nfilename + ext)))
|
||||
logger.fdebug('odir src : ' + os.path.join(odir, str(nfilename + ext)))
|
||||
logger.fdebug("Moving " + src + " ... to ... " + dst)
|
||||
logger.fdebug(module + ' odir src : ' + os.path.join(odir, str(nfilename + ext)))
|
||||
logger.fdebug(module + ' Moving ' + src + ' ... to ... ' + dst)
|
||||
try:
|
||||
shutil.move(src, dst)
|
||||
except (OSError, IOError):
|
||||
logger.fdebug("Failed to move directory - check directories and manually re-run.")
|
||||
logger.fdebug("Post-Processing ABORTED.")
|
||||
logger.fdebug(module + ' Failed to move directory - check directories and manually re-run.')
|
||||
logger.fdebug(module + ' Post-Processing ABORTED.')
|
||||
return
|
||||
logger.fdebug("Successfully moved to : " + dst)
|
||||
logger.fdebug(module + ' Successfully moved to : ' + dst)
|
||||
|
||||
#tidyup old path
|
||||
#try:
|
||||
# os.remove(os.path.join(self.nzb_folder, str(ofilename)))
|
||||
# logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
|
||||
#except (OSError, IOError):
|
||||
# logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
|
||||
# logger.fdebug("Post-Processing ABORTED.")
|
||||
# return
|
||||
#logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))
|
||||
try:
|
||||
if os.path.isdir(odir) and odir != self.nzb_folder:
|
||||
logger.fdebug(module + ' Tidying up. Deleting folder : ' + odir)
|
||||
shutil.rmtree(odir)
|
||||
except (OSError, IOError):
|
||||
logger.fdebug(module + ' Failed to remove temporary directory (' + odir + ') - Processing will continue, but manual removal is necessary')
|
||||
|
||||
#Hopefully set permissions on downloaded file
|
||||
try:
|
||||
|
@ -847,27 +858,25 @@ class PostProcessor(object):
|
|||
os.umask(0)
|
||||
os.chmod(dst.rstrip(), permission)
|
||||
except OSError:
|
||||
logger.error('Failed to change file permissions. Ensure that the user running Mylar has proper permissions to change permissions in : ' + dst)
|
||||
logger.fdebug('Continuing post-processing but unable to change file permissions in ' + dst)
|
||||
logger.error(module + ' Failed to change file permissions. Ensure that the user running Mylar has proper permissions to change permissions in : ' + dst)
|
||||
logger.fdebug(module + ' Continuing post-processing but unable to change file permissions in ' + dst)
|
||||
#delete entry from nzblog table
|
||||
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
|
||||
#update snatched table to change status to Downloaded
|
||||
|
||||
if annchk == "no":
|
||||
updater.foundsearch(comicid, issueid, down=downtype)
|
||||
updater.foundsearch(comicid, issueid, down=downtype, module=module)
|
||||
dispiss = 'issue: ' + str(issuenumOG)
|
||||
else:
|
||||
updater.foundsearch(comicid, issueid, mode='want_ann', down=downtype)
|
||||
updater.foundsearch(comicid, issueid, mode='want_ann', down=downtype, module=module)
|
||||
dispiss = 'annual issue: ' + str(issuenumOG)
|
||||
|
||||
#force rescan of files
|
||||
updater.forceRescan(comicid)
|
||||
logger.info(u"Post-Processing completed for: " + series + " " + dispiss )
|
||||
self._log(u"Post Processing SUCCESSFUL! ")
|
||||
updater.forceRescan(comicid,module=module)
|
||||
|
||||
if mylar.WEEKFOLDER:
|
||||
#if enabled, will *copy* the post-processed file to the weeklypull list folder for the given week.
|
||||
weeklypull.weekly_singlecopy(comicid,issuenum,str(nfilename+ext),dst)
|
||||
weeklypull.weekly_singlecopy(comicid,issuenum,str(nfilename+ext),dst,module=module)
|
||||
|
||||
# retrieve/create the corresponding comic objects
|
||||
if mylar.ENABLE_EXTRA_SCRIPTS:
|
||||
|
@ -897,6 +906,8 @@ class PostProcessor(object):
|
|||
pass
|
||||
else:
|
||||
#manual run + not snatched torrent (or normal manual-run)
|
||||
logger.info(module + ' Post-Processing completed for: ' + series + ' ' + dispiss )
|
||||
self._log(u"Post Processing SUCCESSFUL! ")
|
||||
return self.log
|
||||
|
||||
if annchk == "no":
|
||||
|
@ -907,36 +918,37 @@ class PostProcessor(object):
|
|||
|
||||
if mylar.PROWL_ENABLED:
|
||||
pushmessage = prline
|
||||
logger.info(u"Prowl request")
|
||||
prowl = notifiers.PROWL()
|
||||
prowl.notify(pushmessage,"Download and Postprocessing completed")
|
||||
prowl.notify(pushmessage,"Download and Postprocessing completed", module=module)
|
||||
|
||||
if mylar.NMA_ENABLED:
|
||||
nma = notifiers.NMA()
|
||||
nma.notify(prline=prline, prline2=prline2)
|
||||
nma.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
if mylar.PUSHOVER_ENABLED:
|
||||
logger.info(u"Pushover request")
|
||||
pushover = notifiers.PUSHOVER()
|
||||
pushover.notify(prline, "Download and Post-Processing completed")
|
||||
pushover.notify(prline, "Download and Post-Processing completed", module=module)
|
||||
|
||||
if mylar.BOXCAR_ENABLED:
|
||||
boxcar = notifiers.BOXCAR()
|
||||
boxcar.notify(prline=prline, prline2=prline2)
|
||||
boxcar.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
if mylar.PUSHBULLET_ENABLED:
|
||||
pushbullet = notifiers.PUSHBULLET()
|
||||
pushbullet.notify(prline=prline, prline2=prline2)
|
||||
pushbullet.notify(prline=prline, prline2=prline2, module=module)
|
||||
|
||||
logger.info(module + ' Post-Processing completed for: ' + series + ' ' + dispiss )
|
||||
self._log(u"Post Processing SUCCESSFUL! ")
|
||||
return self.log
|
||||
|
||||
class FolderCheck():
|
||||
|
||||
def run(self):
|
||||
module = '[FOLDER-CHECK]'
|
||||
import PostProcessor, logger
|
||||
#monitor a selected folder for 'snatched' files that haven't been processed
|
||||
logger.info('Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
logger.info(module + ' Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
|
||||
PostProcess = PostProcessor.PostProcessor('Manual Run', mylar.CHECK_FOLDER)
|
||||
result = PostProcess.Process()
|
||||
logger.info('Finished checking for newly snatched downloads')
|
||||
logger.info(module + ' Finished checking for newly snatched downloads')
|
||||
|
||||
|
|
|
@ -109,6 +109,7 @@ COMICVINE_API = None
|
|||
DEFAULT_CVAPI = '583939a3df0a25fc4e8b7a29934a13078002dc27'
|
||||
CVAPI_COUNT = 0
|
||||
CVAPI_TIME = None
|
||||
CVAPI_MAX = 400
|
||||
|
||||
CHECK_GITHUB = False
|
||||
CHECK_GITHUB_ON_STARTUP = False
|
||||
|
@ -269,6 +270,7 @@ ENABLE_META = 0
|
|||
CMTAGGER_PATH = None
|
||||
CT_TAG_CR = 1
|
||||
CT_TAG_CBL = 1
|
||||
CT_CBZ_OVERWRITE = 0
|
||||
|
||||
ENABLE_RSS = 0
|
||||
RSS_CHECKINTERVAL = 20
|
||||
|
@ -343,7 +345,7 @@ def initialize():
|
|||
|
||||
with INIT_LOCK:
|
||||
|
||||
global __INITIALIZED__, COMICVINE_API, DEFAULT_CVAPI, CVAPI_COUNT, CVAPI_TIME, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
|
||||
global __INITIALIZED__, COMICVINE_API, DEFAULT_CVAPI, CVAPI_COUNT, CVAPI_TIME, CVAPI_MAX, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, OS_LANG, OS_ENCODING, \
|
||||
HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, HTTPS_FORCE_ON, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, \
|
||||
CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, DESTINATION_DIR, \
|
||||
DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
|
||||
|
@ -351,7 +353,7 @@ def initialize():
|
|||
USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_UID, DOGNZB_APIKEY, \
|
||||
NEWZNAB, NEWZNAB_NAME, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_UID, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
|
||||
RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, ALTEXPERIMENTAL, \
|
||||
ENABLE_META, CMTAGGER_PATH, CT_TAG_CR, CT_TAG_CBL, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, PROVIDER_ORDER, \
|
||||
ENABLE_META, CMTAGGER_PATH, CT_TAG_CR, CT_TAG_CBL, CT_CBZ_OVERWRITE, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, PROVIDER_ORDER, \
|
||||
dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \
|
||||
ENABLE_TORRENTS, MINSEEDS, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \
|
||||
ENABLE_RSS, RSS_CHECKINTERVAL, RSS_LASTRUN, ENABLE_TORRENT_SEARCH, ENABLE_KAT, KAT_PROXY, ENABLE_CBT, CBT_PASSKEY, SNATCHEDTORRENT_NOTIFY, \
|
||||
|
@ -386,6 +388,9 @@ def initialize():
|
|||
COMICVINE_API = check_setting_str(CFG, 'General', 'comicvine_api', '')
|
||||
if not COMICVINE_API:
|
||||
COMICVINE_API = None
|
||||
CVAPI_COUNT = check_setting_int(CFG, 'General', 'cvapi_count', 0)
|
||||
CVAPI_TIME = check_setting_str(CFG, 'General', 'cvapi_time', '')
|
||||
helpers.cvapi_check() #get the values logged in.
|
||||
HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
|
||||
HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
|
||||
HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
|
||||
|
@ -520,6 +525,7 @@ def initialize():
|
|||
CMTAGGER_PATH = check_setting_str(CFG, 'General', 'cmtagger_path', '')
|
||||
CT_TAG_CR = bool(check_setting_int(CFG, 'General', 'ct_tag_cr', 1))
|
||||
CT_TAG_CBL = bool(check_setting_int(CFG, 'General', 'ct_tag_cbl', 1))
|
||||
CT_CBZ_OVERWRITE = bool(check_setting_int(CFG, 'General', 'ct_cbz_overwrite', 0))
|
||||
|
||||
INDIE_PUB = check_setting_str(CFG, 'General', 'indie_pub', '75')
|
||||
BIGGIE_PUB = check_setting_str(CFG, 'General', 'biggie_pub', '55')
|
||||
|
@ -1018,6 +1024,11 @@ def config_write():
|
|||
new_config['General'] = {}
|
||||
new_config['General']['config_version'] = CONFIG_VERSION
|
||||
new_config['General']['comicvine_api'] = COMICVINE_API
|
||||
#write the current CV API time / count here so it's persistent through reboots/restarts.
|
||||
#get the current values.
|
||||
helpers.cvapi_check()
|
||||
new_config['General']['cvapi_count'] = CVAPI_COUNT
|
||||
new_config['General']['cvapi_time'] = CVAPI_TIME
|
||||
new_config['General']['http_port'] = HTTP_PORT
|
||||
new_config['General']['http_host'] = HTTP_HOST
|
||||
new_config['General']['http_username'] = HTTP_USERNAME
|
||||
|
@ -1100,6 +1111,7 @@ def config_write():
|
|||
new_config['General']['cmtagger_path'] = CMTAGGER_PATH
|
||||
new_config['General']['ct_tag_cr'] = int(CT_TAG_CR)
|
||||
new_config['General']['ct_tag_cbl'] = int(CT_TAG_CBL)
|
||||
new_config['General']['ct_cbz_overwrite'] = int(CT_CBZ_OVERWRITE)
|
||||
new_config['General']['indie_pub'] = INDIE_PUB
|
||||
new_config['General']['biggie_pub'] = BIGGIE_PUB
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ import os, errno
|
|||
import sys
|
||||
import re
|
||||
import glob
|
||||
import shlex
|
||||
import platform
|
||||
import shutil
|
||||
import time
|
||||
|
@ -17,8 +18,12 @@ import mylar
|
|||
from mylar import logger
|
||||
from mylar.helpers import cvapi_check
|
||||
|
||||
def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
||||
logger.fdebug('[META-TAGGING] dirName:' + dirName)
|
||||
def run (dirName, nzbName=None, issueid=None, manual=None, filename=None, module=None):
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[META-TAGGER]'
|
||||
|
||||
logger.fdebug(module + ' dirName:' + dirName)
|
||||
|
||||
## Set the directory in which comictagger and other external commands are located - IMPORTANT - ##
|
||||
# ( User may have to modify, depending on their setup, but these are some guesses for now )
|
||||
|
@ -41,8 +46,8 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
if not os.path.isfile(unrar_cmd):
|
||||
unrar_cmd = "C:\Program Files (x86)\WinRAR\UnRAR.exe"
|
||||
if not os.path.isfile(unrar_cmd):
|
||||
logger.fdebug('[META-TAGGING] Unable to locate UnRAR.exe - make sure it is installed.')
|
||||
logger.fdebug('[META-TAGGING] Aborting meta-tagging.')
|
||||
logger.fdebug(module + ' Unable to locate UnRAR.exe - make sure it is installed.')
|
||||
logger.fdebug(module + ' Aborting meta-tagging.')
|
||||
return "fail"
|
||||
|
||||
|
||||
|
@ -56,8 +61,8 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
try:
|
||||
import configparser
|
||||
except ImportError:
|
||||
logger.fdebug('[META-TAGGING] configparser not found on system. Please install manually in order to write metadata')
|
||||
logger.fdebug('[META-TAGGING] continuing with PostProcessing, but I am not using metadata.')
|
||||
logger.fdebug(module + ' configparser not found on system. Please install manually in order to write metadata')
|
||||
logger.fdebug(module + ' continuing with PostProcessing, but I am not using metadata.')
|
||||
return "fail"
|
||||
|
||||
#set this to the lib path (ie. '<root of mylar>/lib')
|
||||
|
@ -72,9 +77,9 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
file_conversion = True
|
||||
file_extension_fixing = True
|
||||
if not os.path.exists( unrar_cmd ):
|
||||
logger.fdebug('[META-TAGGING] WARNING: cannot find the unrar command.')
|
||||
logger.fdebug('[META-TAGGING] File conversion and extension fixing not available')
|
||||
logger.fdebug('[META-TAGGING] You probably need to edit this script, or install the missing tool, or both!')
|
||||
logger.fdebug(module + ' WARNING: cannot find the unrar command.')
|
||||
logger.fdebug(module + ' File conversion and extension fixing not available')
|
||||
logger.fdebug(module + ' You probably need to edit this script, or install the missing tool, or both!')
|
||||
file_conversion = False
|
||||
file_extension_fixing = False
|
||||
|
||||
|
@ -86,35 +91,42 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
if manual is None:
|
||||
comicpath = os.path.join( downloadpath , "temp" )
|
||||
else:
|
||||
chkpath, chkfile = os.path.split(filename)
|
||||
logger.fdebug(module + ' chkpath: ' + chkpath)
|
||||
logger.fdebug(module + ' chkfile: ' + chkfile)
|
||||
extensions = ('.cbr', '.cbz')
|
||||
if os.path.isdir(chkpath) and chkpath != downloadpath:
|
||||
logger.fdebug(module + ' Changing ' + downloadpath + ' location to ' + chkpath + ' as it is a directory.')
|
||||
downloadpath = chkpath
|
||||
comicpath = os.path.join( downloadpath, issueid )
|
||||
unrar_folder = os.path.join( comicpath , "unrard" )
|
||||
|
||||
logger.fdebug('[META-TAGGING] ---directory settings.')
|
||||
logger.fdebug('[META-TAGGING] scriptname : ' + scriptname)
|
||||
logger.fdebug('[META-TAGGING] downloadpath : ' + downloadpath)
|
||||
logger.fdebug('[META-TAGGING] sabnzbdscriptpath : ' + sabnzbdscriptpath)
|
||||
logger.fdebug('[META-TAGGING] comicpath : ' + comicpath)
|
||||
logger.fdebug('[META-TAGGING] unrar_folder : ' + unrar_folder)
|
||||
logger.fdebug('[META-TAGGING] Running the Post-SabNZBd/Mylar script')
|
||||
logger.fdebug(module + ' Paths / Locations:')
|
||||
logger.fdebug(module + ' scriptname : ' + scriptname)
|
||||
logger.fdebug(module + ' downloadpath : ' + downloadpath)
|
||||
logger.fdebug(module + ' sabnzbdscriptpath : ' + sabnzbdscriptpath)
|
||||
logger.fdebug(module + ' comicpath : ' + comicpath)
|
||||
logger.fdebug(module + ' unrar_folder : ' + unrar_folder)
|
||||
logger.fdebug(module + ' Running the ComicTagger Add-on for Mylar')
|
||||
|
||||
if os.path.exists( comicpath ):
|
||||
shutil.rmtree( comicpath )
|
||||
|
||||
logger.fdebug('[META-TAGGING] Attempting to create directory @: ' + str(comicpath))
|
||||
logger.fdebug(module + ' Attempting to create directory @: ' + str(comicpath))
|
||||
try:
|
||||
os.makedirs(comicpath)
|
||||
except OSError:
|
||||
raise
|
||||
|
||||
logger.fdebug('[META-TAGGING] Created directory @ : ' + str(comicpath))
|
||||
logger.fdebug('[META-TAGGING] Filename is : ' + str(filename))
|
||||
logger.fdebug(module + ' Created directory @ : ' + str(comicpath))
|
||||
logger.fdebug(module + ' Filename is : ' + str(filename))
|
||||
if filename is None:
|
||||
filename_list = glob.glob( os.path.join( downloadpath, "*.cbz" ) )
|
||||
filename_list.extend( glob.glob( os.path.join( downloadpath, "*.cbr" ) ) )
|
||||
fcount = 1
|
||||
for f in filename_list:
|
||||
if fcount > 1:
|
||||
logger.fdebug('[META-TAGGING] More than one cbr/cbz within path, performing Post-Process on first file detected: ' + f)
|
||||
logger.fdebug(module + ' More than one cbr/cbz within path, performing Post-Process on first file detected: ' + f)
|
||||
break
|
||||
shutil.move( f, comicpath )
|
||||
filename = f #just the filename itself
|
||||
|
@ -129,25 +141,56 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
if filename.endswith('.cbr'):
|
||||
f = os.path.join( comicpath, filename )
|
||||
if zipfile.is_zipfile( f ):
|
||||
logger.fdebug('[META-TAGGING] zipfile detected')
|
||||
logger.fdebug(module + ' zipfile detected')
|
||||
base = os.path.splitext( f )[0]
|
||||
shutil.move( f, base + ".cbz" )
|
||||
logger.fdebug('[META-TAGGING] {0}: renaming {1} to be a cbz'.format( scriptname, os.path.basename( f ) ))
|
||||
logger.fdebug(module + ' {0}: renaming {1} to be a cbz'.format( scriptname, os.path.basename( f ) ))
|
||||
|
||||
if file_extension_fixing:
|
||||
if filename.endswith('.cbz'):
|
||||
logger.info(module + ' Filename detected as a .cbz file.')
|
||||
f = os.path.join( comicpath, filename )
|
||||
logger.fdebug(module + ' filename : ' + f)
|
||||
|
||||
if os.path.isfile( f ):
|
||||
try:
|
||||
rar_test_cmd_output = "is not RAR archive" #default, in case of error
|
||||
rar_test_cmd_output = subprocess.check_output( [ unrar_cmd, "t", f ] )
|
||||
except:
|
||||
pass
|
||||
logger.fdebug(module + ' This is a zipfile. Unable to test rar.')
|
||||
|
||||
if not "is not RAR archive" in rar_test_cmd_output:
|
||||
base = os.path.splitext( f )[0]
|
||||
shutil.move( f, base + ".cbr" )
|
||||
logger.fdebug('[META-TAGGING] {0}: renaming {1} to be a cbr'.format( scriptname, os.path.basename( f ) ))
|
||||
logger.fdebug(module + ' {0}: renaming {1} to be a cbr'.format( scriptname, os.path.basename( f ) ))
|
||||
else:
|
||||
try:
|
||||
with open(f): pass
|
||||
except:
|
||||
logger.warn(module + ' No zip file present')
|
||||
return "fail"
|
||||
|
||||
|
||||
base = os.path.join(re.sub(issueid, '', comicpath), filename) #extension is already .cbz
|
||||
logger.fdebug(module + ' Base set to : ' + base)
|
||||
logger.fdebug(module + ' Moving : ' + f + ' - to - ' + base)
|
||||
shutil.move( f, base)
|
||||
try:
|
||||
with open(base):
|
||||
logger.fdebug(module + ' Verified file exists in location: ' + base)
|
||||
removetemp = True
|
||||
except:
|
||||
logger.fdebug(module + ' Cannot verify file exist in location: ' + base)
|
||||
removetemp = False
|
||||
|
||||
if removetemp == True:
|
||||
if comicpath != downloadpath:
|
||||
shutil.rmtree( comicpath )
|
||||
logger.fdebug(module + ' Successfully removed temporary directory: ' + comicpath)
|
||||
else:
|
||||
loggger.fdebug(module + ' Unable to remove temporary directory since it is identical to the download location : ' + comicpath)
|
||||
logger.fdebug(module + ' new filename : ' + base)
|
||||
nfilename = base
|
||||
|
||||
# Now rename all CBR files to RAR
|
||||
if filename.endswith('.cbr'):
|
||||
|
@ -160,7 +203,7 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
## Changes any cbr files to cbz files for insertion of metadata ##
|
||||
if file_conversion:
|
||||
f = os.path.join( comicpath, filename )
|
||||
logger.fdebug('[META-TAGGING] {0}: converting {1} to be zip format'.format( scriptname, os.path.basename( f ) ))
|
||||
logger.fdebug(module + ' {0}: converting {1} to be zip format'.format( scriptname, os.path.basename( f ) ))
|
||||
basename = os.path.splitext( f )[0]
|
||||
zipname = basename + ".cbz"
|
||||
|
||||
|
@ -169,17 +212,17 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
os.chdir( unrar_folder )
|
||||
|
||||
# Extract and zip up
|
||||
logger.fdebug('[META-TAGGING] {0}: Comicpath is ' + baserar) #os.path.join(comicpath,basename))
|
||||
logger.fdebug('[META-TAGGING] {0}: Unrar is ' + unrar_folder )
|
||||
logger.fdebug(module + ' {0}: Comicpath is ' + baserar) #os.path.join(comicpath,basename))
|
||||
logger.fdebug(module + ' {0}: Unrar is ' + unrar_folder )
|
||||
try:
|
||||
#subprocess.Popen( [ unrar_cmd, "x", os.path.join(comicpath,basename) ] ).communicate()
|
||||
output = subprocess.check_output( [ unrar_cmd, 'x', baserar ] ) #os.path.join(comicpath,basename) ] )
|
||||
except CalledProcessError as e:
|
||||
if e.returncode == 3:
|
||||
logger.fdebug('[META-TAGGING] [Unrar Error 3] - Broken Archive.')
|
||||
logger.warn(module + ' [Unrar Error 3] - Broken Archive.')
|
||||
elif e.returncode == 1:
|
||||
logger.fdebug('[META-TAGGING] [Unrar Error 1] - No files to extract.')
|
||||
logger.fdebug('[META-TAGGING] Marking this as an incomplete download.')
|
||||
logger.warn(module + ' [Unrar Error 1] - No files to extract.')
|
||||
logger.warn(module + ' Marking this as an incomplete download.')
|
||||
return "unrar error"
|
||||
|
||||
shutil.make_archive( basename, "zip", unrar_folder )
|
||||
|
@ -195,27 +238,34 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
try:
|
||||
with open(f): pass
|
||||
except:
|
||||
logger.warn('[META-TAGGING] No zip file present')
|
||||
logger.warn(module + ' No zip file present:' + f)
|
||||
return "fail"
|
||||
base = os.path.splitext( f )[0]
|
||||
shutil.move( f, base + ".cbz" )
|
||||
nfilename = base + ".cbz"
|
||||
else:
|
||||
logger.fdebug('[META-TAGGING] Filename:' + filename)
|
||||
nfilename = filename
|
||||
#else:
|
||||
# logger.fdebug(module + ' Filename:' + filename)
|
||||
# nfilename = filename
|
||||
|
||||
#if os.path.isfile( nfilename ):
|
||||
# logger.fdebug(module + ' File exists in given location already : ' + nfilename)
|
||||
# file_dir, file_n = os.path.split(nfilename)
|
||||
#else:
|
||||
# #remove the IssueID from the path
|
||||
# file_dir = re.sub(issueid, '', comicpath)
|
||||
# file_n = os.path.split(nfilename)[1]
|
||||
file_dir = re.sub(issueid, '', comicpath)
|
||||
file_n = os.path.split(nfilename)[1]
|
||||
logger.fdebug(module + ' Converted directory: ' + str(file_dir))
|
||||
logger.fdebug(module + ' Converted filename: ' + str(file_n))
|
||||
logger.fdebug(module + ' Destination path: ' + os.path.join(file_dir,file_n)) #dirName,file_n))
|
||||
logger.fdebug(module + ' dirName: ' + dirName)
|
||||
logger.fdebug(module + ' absDirName: ' + os.path.abspath(dirName))
|
||||
|
||||
##set up default comictagger options here.
|
||||
tagoptions = [ "-s", "--verbose" ]
|
||||
|
||||
|
||||
if os.path.isfile( nfilename ):
|
||||
logger.fdebug('[META-TAGGING] File exists in given location already.')
|
||||
file_dir, file_n = os.path.split(nfilename)
|
||||
else:
|
||||
#remove the IssueID from the path
|
||||
file_dir = re.sub(issueid, '', comicpath)
|
||||
file_n = os.path.split(nfilename)[1]
|
||||
logger.fdebug('[META-TAGGING] Converted directory: ' + str(file_dir))
|
||||
logger.fdebug('[META-TAGGING] Converted filename: ' + str(file_n))
|
||||
logger.fdebug('[META-TAGGING] Destination path: ' + os.path.join(dirName,file_n))
|
||||
logger.fdebug('[META-TAGGING] dirName: ' + dirName)
|
||||
logger.fdebug('[META-TAGGING] absDirName: ' + os.path.abspath(dirName))
|
||||
## check comictagger version - less than 1.15.beta - take your chances.
|
||||
ctversion = subprocess.check_output( [ comictagger_cmd, "--version" ] )
|
||||
ctend = ctversion.find(':')
|
||||
|
@ -223,13 +273,14 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
ctcheck = re.sub('\.', '', ctcheck).strip()
|
||||
if int(ctcheck) >= int('1115'): #(v1.1.15)
|
||||
if mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
|
||||
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - no personal ComicVine API Key supplied. Take your chances.')
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend] + ' being used - no personal ComicVine API Key supplied. Take your chances.')
|
||||
use_cvapi = "False"
|
||||
else:
|
||||
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - using personal ComicVine API key supplied via mylar.')
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend] + ' being used - using personal ComicVine API key supplied via mylar.')
|
||||
use_cvapi = "True"
|
||||
tagoptions.extend( [ "--cv-api-key", mylar.COMICVINE_API ] )
|
||||
else:
|
||||
logger.fdebug('[META-TAGGING] ' + ctversion[:ctend] + ' being used - personal ComicVine API key not supported in this version. Good luck.')
|
||||
logger.fdebug(module + ' ' + ctversion[:ctend] + ' being used - personal ComicVine API key not supported in this version. Good luck.')
|
||||
use_cvapi = "False"
|
||||
|
||||
i = 1
|
||||
|
@ -237,69 +288,118 @@ def run (dirName, nzbName=None, issueid=None, manual=None, filename=None):
|
|||
|
||||
if mylar.CT_TAG_CR:
|
||||
tagcnt = 1
|
||||
logger.info('[META-TAGGING] CR Tagging enabled.')
|
||||
logger.fdebug(module + ' CR Tagging enabled.')
|
||||
|
||||
if mylar.CT_TAG_CBL:
|
||||
if not mylar.CT_TAG_CR: i = 2 #set the tag to start at cbl and end without doing another tagging.
|
||||
tagcnt = 2
|
||||
logger.info('[META-TAGGING] CBL Tagging enabled.')
|
||||
logger.fdebug(module + ' CBL Tagging enabled.')
|
||||
|
||||
if tagcnt == 0:
|
||||
logger.warn('[META-TAGGING] You have metatagging enabled, but you have not selected the type(s) of metadata to write. Please fix and re-run manually')
|
||||
logger.warn(module + ' You have metatagging enabled, but you have not selected the type(s) of metadata to write. Please fix and re-run manually')
|
||||
return "fail"
|
||||
|
||||
#if it's a cbz file - check if no-overwrite existing tags is enabled / disabled in config.
|
||||
if nfilename.endswith('.cbz'):
|
||||
if mylar.CT_CBZ_OVERWRITE:
|
||||
logger.fdebug(module + ' Will modify existing tag blocks even if it exists.')
|
||||
else:
|
||||
logger.fdebug(module + ' Will NOT modify existing tag blocks even if they exist already.')
|
||||
tagoptions.extend( [ "--nooverwrite" ] )
|
||||
|
||||
if issueid is None:
|
||||
tagoptions.extend( [ "-f", "-o" ] )
|
||||
else:
|
||||
tagoptions.extend( [ "-o", "--id", issueid ] )
|
||||
|
||||
original_tagoptions = tagoptions
|
||||
og_tagtype = None
|
||||
|
||||
while ( i <= tagcnt ):
|
||||
if i == 1:
|
||||
tagtype = "cr" # CR meta-tagging cycle.
|
||||
tagtype = 'cr' # CR meta-tagging cycle.
|
||||
tagdisp = 'ComicRack tagging'
|
||||
elif i == 2:
|
||||
tagtype = "cbl" #Cbl meta-tagging cycle
|
||||
tagtype = 'cbl' #Cbl meta-tagging cycle
|
||||
tagdisp = 'Comicbooklover tagging'
|
||||
logger.info('[META-TAGGING] ' + tagdisp + ' meta-tagging processing started.')
|
||||
|
||||
|
||||
f_tagoptions = original_tagoptions
|
||||
|
||||
if og_tagtype is not None:
|
||||
for index, item in enumerate(f_tagoptions):
|
||||
if item == og_tagtype:
|
||||
f_tagoptions[index] = tagtype
|
||||
else:
|
||||
f_tagoptions.extend( [ "--type", tagtype, nfilename ] )
|
||||
|
||||
og_tagtype = tagtype
|
||||
|
||||
logger.info(module + ' ' + tagdisp + ' meta-tagging processing started.')
|
||||
|
||||
#CV API Check here.
|
||||
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
|
||||
cvapi_check()
|
||||
|
||||
currentScriptName = str(comictagger_cmd).decode("string_escape")
|
||||
logger.fdebug(module + ' Enabling ComicTagger script: ' + str(currentScriptName) + ' with options: ' + str(f_tagoptions))
|
||||
# generate a safe command line string to execute the script and provide all the parameters
|
||||
script_cmd = shlex.split(currentScriptName, posix=False) + f_tagoptions
|
||||
|
||||
# use subprocess to run the command and capture output
|
||||
logger.fdebug(module + ' Executing command: '+str(script_cmd))
|
||||
logger.fdebug(module + ' Absolute path to script: '+script_cmd[0])
|
||||
try:
|
||||
p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
out, err = p.communicate() #@UnusedVariable
|
||||
logger.fdebug(module + '[COMIC-TAGGER] : '+str(out))
|
||||
logger.info(module + '[COMIC-TAGGER] Successfully wrote ' + tagdisp)
|
||||
except OSError, e:
|
||||
logger.warn(module + '[COMIC-TAGGER] Unable to run comictagger with the options provided: ' + str(script_cmd))
|
||||
|
||||
#increment CV API counter.
|
||||
mylar.CVAPI_COUNT +=1
|
||||
|
||||
|
||||
## Tag each CBZ, and move it back to original directory ##
|
||||
if use_cvapi == "True":
|
||||
if issueid is None:
|
||||
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
else:
|
||||
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
logger.info('[META-TAGGING] ' + tagtype + ' meta-tagging complete')
|
||||
#increment CV API counter.
|
||||
mylar.CVAPI_COUNT +=1
|
||||
else:
|
||||
if issueid is None:
|
||||
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
else:
|
||||
subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
#increment CV API counter.
|
||||
mylar.CVAPI_COUNT +=1
|
||||
#if use_cvapi == "True":
|
||||
# if issueid is None:
|
||||
# subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
# else:
|
||||
# subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "--cv-api-key", mylar.COMICVINE_API, "-o", "--id", issueid, "--verbose", nfilename ] ).communicate()
|
||||
# logger.info(module + ' ' + tagdisp + ' meta-tagging complete')
|
||||
# #increment CV API counter.
|
||||
# mylar.CVAPI_COUNT +=1
|
||||
#else:
|
||||
# if issueid is None:
|
||||
# subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-f", "-o", "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
# else:
|
||||
# subprocess.Popen( [ comictagger_cmd, "-s", "-t", tagtype, "-o", "--id", issueid, "--verbose", "--nooverwrite", nfilename ] ).communicate()
|
||||
# #increment CV API counter.
|
||||
# mylar.CVAPI_COUNT +=1
|
||||
i+=1
|
||||
|
||||
if os.path.exists(os.path.join(os.path.abspath(dirName),file_n)):
|
||||
logger.fdebug('[META-TAGGING] Unable to move - file already exists.')
|
||||
if os.path.exists(os.path.join(os.path.abspath(file_dir),file_n)): #(os.path.abspath(dirName),file_n)):
|
||||
logger.fdebug(module + ' Unable to move from temporary directory - file already exists in destination: ' + os.path.join(os.path.abspath(file_dir),file_n))
|
||||
else:
|
||||
shutil.move( os.path.join(comicpath, nfilename), os.path.join(os.path.abspath(dirName),file_n))
|
||||
shutil.move( os.path.join(comicpath, nfilename), os.path.join(os.path.abspath(file_dir),file_n)) #os.path.abspath(dirName),file_n))
|
||||
#shutil.move( nfilename, os.path.join(os.path.abspath(dirName),file_n))
|
||||
logger.fdebug('[META-TAGGING] Sucessfully moved file from temporary path.')
|
||||
logger.fdebug(module + ' Sucessfully moved file from temporary path.')
|
||||
i = 0
|
||||
|
||||
os.chdir( mylar.PROG_DIR )
|
||||
|
||||
while i < 10:
|
||||
try:
|
||||
logger.fdebug('[META-TAGGING] Attempting to remove: ' + comicpath)
|
||||
logger.fdebug(module + ' Attempting to remove: ' + comicpath)
|
||||
shutil.rmtree( comicpath )
|
||||
except:
|
||||
time.sleep(.1)
|
||||
else:
|
||||
return os.path.join(os.path.abspath(dirName), file_n)
|
||||
return os.path.join(os.path.abspath(file_dir), file_n) #dirName), file_n)
|
||||
i+=1
|
||||
|
||||
logger.fdebug('[META-TAGGING] Failed to remove temporary path : ' + str(comicpath))
|
||||
logger.fdebug(module + ' Failed to remove temporary path : ' + str(comicpath))
|
||||
|
||||
return os.path.join(os.path.abspath(dirName),file_n)
|
||||
return os.path.join(os.path.abspath(file_dir),file_n) #dirName),file_n)
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ def pulldetails(comicid,type,issueid=None,offset=1):
|
|||
PULLURL = mylar.CVURL + 'story_arc/?api_key=' + str(comicapi) + '&format=xml&filter=id:' + str(issueid) + '&field_list=cover_date'
|
||||
|
||||
#CV API Check here.
|
||||
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
|
||||
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= mylar.CVAPI_MAX:
|
||||
cvapi_check()
|
||||
#download the file:
|
||||
file = urllib2.urlopen(PULLURL)
|
||||
|
|
|
@ -788,24 +788,27 @@ def listFiles(dir,watchcomic,Publisher,AlternateSearch=None,manual=None,sarc=Non
|
|||
watchmatch['comiccount'] = comiccnt
|
||||
return watchmatch
|
||||
|
||||
def validateAndCreateDirectory(dir, create=False):
|
||||
def validateAndCreateDirectory(dir, create=False, module=None):
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[DIRECTORY-CHECK]'
|
||||
if os.path.exists(dir):
|
||||
logger.info('Found comic directory: ' + dir)
|
||||
logger.info(module + ' Found comic directory: ' + dir)
|
||||
return True
|
||||
else:
|
||||
logger.warn('Could not find comic directory: ' + dir)
|
||||
logger.warn(module + ' Could not find comic directory: ' + dir)
|
||||
if create:
|
||||
if dir.strip():
|
||||
logger.info('Creating comic directory (' + str(mylar.CHMOD_DIR) + ') : ' + dir)
|
||||
logger.info(module + ' Creating comic directory (' + str(mylar.CHMOD_DIR) + ') : ' + dir)
|
||||
try:
|
||||
permission = int(mylar.CHMOD_DIR, 8)
|
||||
os.umask(0) # this is probably redudant, but it doesn't hurt to clear the umask here.
|
||||
os.makedirs(dir.rstrip(), permission )
|
||||
except OSError:
|
||||
raise SystemExit('Could not create directory: ' + dir + '. Exiting....')
|
||||
raise SystemExit(module + ' Could not create directory: ' + dir + '. Exiting....')
|
||||
return True
|
||||
else:
|
||||
logger.warn('Provided directory is blank, aborting')
|
||||
logger.warn(module + ' Provided directory is blank, aborting')
|
||||
return False
|
||||
return False
|
||||
|
||||
|
|
|
@ -1075,12 +1075,15 @@ def havetotals(refreshit=None):
|
|||
def cvapi_check(web=None):
|
||||
import logger
|
||||
if web is None: logger.fdebug('[ComicVine API] ComicVine API Check Running...')
|
||||
if mylar.CVAPI_TIME is None:
|
||||
if mylar.CVAPI_TIME is None or mylar.CVAPI_TIME == '':
|
||||
c_date = now()
|
||||
c_obj_date = datetime.datetime.strptime(c_date,"%Y-%m-%d %H:%M:%S")
|
||||
mylar.CVAPI_TIME = c_obj_date
|
||||
else:
|
||||
c_obj_date = mylar.CVAPI_TIME
|
||||
if isinstance(mylar.CVAPI_TIME, unicode):
|
||||
c_obj_date = datetime.datetime.strptime(mylar.CVAPI_TIME,"%Y-%m-%d %H:%M:%S")
|
||||
else:
|
||||
c_obj_date = mylar.CVAPI_TIME
|
||||
if web is None: logger.fdebug('[ComicVine API] API Start Monitoring Time (~15mins): ' + str(mylar.CVAPI_TIME))
|
||||
now_date = now()
|
||||
n_date = datetime.datetime.strptime(now_date,"%Y-%m-%d %H:%M:%S")
|
||||
|
@ -1088,10 +1091,10 @@ def cvapi_check(web=None):
|
|||
absdiff = abs(n_date - c_obj_date)
|
||||
mins = round(((absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 60.0),2)
|
||||
if mins < 15:
|
||||
if web is None: logger.info('[ComicVine API] Comicvine API count now at : ' + str(mylar.CVAPI_COUNT) + ' in ' + str(mins) + ' minutes.')
|
||||
if mylar.CVAPI_COUNT > 200:
|
||||
if web is None: logger.info('[ComicVine API] Comicvine API count now at : ' + str(mylar.CVAPI_COUNT) + ' / ' + str(mylar.CVAPI_MAX) + ' in ' + str(mins) + ' minutes.')
|
||||
if mylar.CVAPI_COUNT > mylar.CVAPI_MAX:
|
||||
cvleft = 15 - mins
|
||||
if web is None: logger.warn('[ComicVine API] You have already hit your API limit with ' + str(cvleft) + ' minutes. Best be slowing down, cowboy.')
|
||||
if web is None: logger.warn('[ComicVine API] You have already hit your API limit (' + str(mylar.CVAPI_MAX) + ' with ' + str(cvleft) + ' minutes. Best be slowing down, cowboy.')
|
||||
elif mins > 15:
|
||||
mylar.CVAPI_COUNT = 0
|
||||
c_date = now()
|
||||
|
|
|
@ -932,6 +932,9 @@ def GCDimport(gcomicid, pullupd=None,imported=None,ogcname=None):
|
|||
def issue_collection(issuedata,nostatus):
|
||||
myDB = db.DBConnection()
|
||||
|
||||
nowdate = datetime.datetime.now()
|
||||
nowtime = nowdate.strftime("%Y%m%d")
|
||||
|
||||
if issuedata:
|
||||
for issue in issuedata:
|
||||
|
||||
|
@ -958,13 +961,15 @@ def issue_collection(issuedata,nostatus):
|
|||
#logger.fdebug('issue #' + str(issue['Issue_Number']) + 'does not exist in db.')
|
||||
if mylar.AUTOWANT_ALL:
|
||||
newValueDict['Status'] = "Wanted"
|
||||
elif issue['IssueDate'] > helpers.today() and mylar.AUTOWANT_UPCOMING:
|
||||
#logger.fdebug('autowant all')
|
||||
elif re.sub('-', '', issue['ReleaseDate']).strip() > nowtime and mylar.AUTOWANT_UPCOMING:
|
||||
#logger.fdebug(str(re.sub('-', '', issue['ReleaseDate']).strip()) + ' > ' + str(nowtime))
|
||||
newValueDict['Status'] = "Wanted"
|
||||
else:
|
||||
newValueDict['Status'] = "Skipped"
|
||||
|
||||
#logger.fdebug('status is : ' + str(newValueDict))
|
||||
else:
|
||||
#logger.info('Existing status for issue #' + str(issue['Issue_Number']) + ' : ' + str(iss_exists['Status']))
|
||||
#logger.fdebug('Existing status for issue #' + str(issue['Issue_Number']) + ' : ' + str(iss_exists['Status']))
|
||||
newValueDict['Status'] = iss_exists['Status']
|
||||
|
||||
else:
|
||||
|
|
|
@ -40,7 +40,7 @@ def pullsearch(comicapi,comicquery,offset,explicit):
|
|||
|
||||
#all these imports are standard on most modern python implementations
|
||||
#CV API Check here.
|
||||
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= 200:
|
||||
if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= mylar.CVAPI_MAX:
|
||||
cvapi_check()
|
||||
#download the file:
|
||||
try:
|
||||
|
|
|
@ -42,10 +42,14 @@ class PROWL:
|
|||
def conf(self, options):
|
||||
return cherrypy.config['config'].get('Prowl', options)
|
||||
|
||||
def notify(self, message, event):
|
||||
def notify(self, message, event, module=None):
|
||||
if not mylar.PROWL_ENABLED:
|
||||
return
|
||||
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
http_handler = HTTPSConnection("api.prowlapp.com")
|
||||
|
||||
data = {'apikey': mylar.PROWL_KEYS,
|
||||
|
@ -62,13 +66,13 @@ class PROWL:
|
|||
request_status = response.status
|
||||
|
||||
if request_status == 200:
|
||||
logger.info(u"Prowl notifications sent.")
|
||||
logger.info(module + ' Prowl notifications sent.')
|
||||
return True
|
||||
elif request_status == 401:
|
||||
logger.info(u"Prowl auth failed: %s" % response.reason)
|
||||
logger.info(module + ' Prowl auth failed: %s' % response.reason)
|
||||
return False
|
||||
else:
|
||||
logger.info(u"Prowl notification failed.")
|
||||
logger.info(module + ' Prowl notification failed.')
|
||||
return False
|
||||
|
||||
def updateLibrary(self):
|
||||
|
@ -90,7 +94,7 @@ class NMA:
|
|||
self.apikey = mylar.NMA_APIKEY
|
||||
self.priority = mylar.NMA_PRIORITY
|
||||
|
||||
def _send(self, data):
|
||||
def _send(self, data, module):
|
||||
|
||||
url_data = urllib.urlencode(data)
|
||||
url = 'https://www.notifymyandroid.com/publicapi/notify'
|
||||
|
@ -100,15 +104,19 @@ class NMA:
|
|||
try:
|
||||
handle = urllib2.urlopen(req)
|
||||
except Exception, e:
|
||||
logger.warn('Error opening NotifyMyAndroid url: ' % e)
|
||||
logger.warn(module + ' Error opening NotifyMyAndroid url: ' % e)
|
||||
return
|
||||
|
||||
response = handle.read().decode(mylar.SYS_ENCODING)
|
||||
|
||||
return response
|
||||
|
||||
def notify(self, snline=None, prline=None, prline2=None, snatched_nzb=None, sent_to=None, prov=None):
|
||||
|
||||
def notify(self, snline=None, prline=None, prline2=None, snatched_nzb=None, sent_to=None, prov=None, module=None):
|
||||
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
apikey = self.apikey
|
||||
priority = self.priority
|
||||
|
||||
|
@ -122,12 +130,12 @@ class NMA:
|
|||
|
||||
data = { 'apikey': apikey, 'application':'Mylar', 'event': event, 'description': description, 'priority': priority}
|
||||
|
||||
logger.info('Sending notification request to NotifyMyAndroid')
|
||||
request = self._send(data)
|
||||
logger.info(module + ' Sending notification request to NotifyMyAndroid')
|
||||
request = self._send(data,module)
|
||||
|
||||
if not request:
|
||||
logger.warn('Error sending notification request to NotifyMyAndroid')
|
||||
|
||||
logger.warn(module + ' Error sending notification request to NotifyMyAndroid')
|
||||
|
||||
# 2013-04-01 Added Pushover.net notifications, based on copy of Prowl class above.
|
||||
# No extra care has been put into API friendliness at the moment (read: https://pushover.net/api#friendly)
|
||||
class PUSHOVER:
|
||||
|
@ -151,9 +159,12 @@ class PUSHOVER:
|
|||
#def conf(self, options):
|
||||
# return cherrypy.config['config'].get('Pushover', options)
|
||||
|
||||
def notify(self, message, event):
|
||||
def notify(self, message, event, module=None):
|
||||
if not mylar.PUSHOVER_ENABLED:
|
||||
return
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
http_handler = HTTPSConnection("api.pushover.net:443")
|
||||
|
||||
|
@ -172,13 +183,13 @@ class PUSHOVER:
|
|||
request_status = response.status
|
||||
|
||||
if request_status == 200:
|
||||
logger.info(u"Pushover notifications sent.")
|
||||
logger.info(module + ' Pushover notifications sent.')
|
||||
return True
|
||||
elif request_status == 401:
|
||||
logger.info(u"Pushover auth failed: %s" % response.reason)
|
||||
logger.info(module + 'Pushover auth failed: %s' % response.reason)
|
||||
return False
|
||||
else:
|
||||
logger.info(u"Pushover notification failed.")
|
||||
logger.info(module + ' Pushover notification failed.')
|
||||
return False
|
||||
|
||||
def test(self, apikey, userkey, priority):
|
||||
|
@ -198,7 +209,7 @@ class BOXCAR:
|
|||
|
||||
self.url = 'https://new.boxcar.io/api/notifications'
|
||||
|
||||
def _sendBoxcar(self, msg, title):
|
||||
def _sendBoxcar(self, msg, title, module):
|
||||
|
||||
"""
|
||||
Sends a boxcar notification to the address provided
|
||||
|
@ -226,19 +237,19 @@ class BOXCAR:
|
|||
except urllib2.URLError, e:
|
||||
# if we get an error back that doesn't have an error code then who knows what's really happening
|
||||
if not hasattr(e, 'code'):
|
||||
logger.error('Boxcar2 notification failed. %s' % e)
|
||||
logger.error(module + 'Boxcar2 notification failed. %s' % e)
|
||||
# If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
|
||||
elif e.code == 400:
|
||||
logger.info("Wrong data sent to boxcar")
|
||||
logger.info('data:' + data)
|
||||
logger.info(module + ' Wrong data sent to boxcar')
|
||||
logger.info(module + ' data:' + data)
|
||||
else:
|
||||
logger.error("Boxcar2 notification failed. Error code: " + str(e.code))
|
||||
logger.error(module + ' Boxcar2 notification failed. Error code: ' + str(e.code))
|
||||
return False
|
||||
|
||||
logger.fdebug("Boxcar2 notification successful.")
|
||||
logger.fdebug(module + ' Boxcar2 notification successful.')
|
||||
return True
|
||||
|
||||
def notify(self, ComicName=None, Year=None, Issue=None, sent_to=None, snatched_nzb=None, force=False):
|
||||
def notify(self, ComicName=None, Year=None, Issue=None, sent_to=None, snatched_nzb=None, force=False, module=None):
|
||||
"""
|
||||
Sends a boxcar notification based on the provided info or SB config
|
||||
|
||||
|
@ -246,9 +257,12 @@ class BOXCAR:
|
|||
message: The message string to send
|
||||
force: If True then the notification will be sent even if Boxcar is disabled in the config
|
||||
"""
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
if not mylar.BOXCAR_ENABLED and not force:
|
||||
logger.fdebug("Notification for Boxcar not enabled, skipping this notification")
|
||||
logger.fdebug(module + ' Notification for Boxcar not enabled, skipping this notification.')
|
||||
return False
|
||||
|
||||
# if no username was given then use the one from the config
|
||||
|
@ -260,9 +274,9 @@ class BOXCAR:
|
|||
message = "Mylar has downloaded and postprocessed: " + ComicName + ' (' + Year + ') #' + Issue
|
||||
|
||||
|
||||
logger.info('Sending notification to Boxcar2')
|
||||
logger.info(module + ' Sending notification to Boxcar2')
|
||||
|
||||
self._sendBoxcar(message, title)
|
||||
self._sendBoxcar(message, title, module)
|
||||
return True
|
||||
|
||||
class PUSHBULLET:
|
||||
|
@ -271,9 +285,12 @@ class PUSHBULLET:
|
|||
self.apikey = mylar.PUSHBULLET_APIKEY
|
||||
self.deviceid = mylar.PUSHBULLET_DEVICEID
|
||||
|
||||
def notify(self, snline=None, prline=None, prline2=None, snatched=None, sent_to=None, prov=None):
|
||||
def notify(self, snline=None, prline=None, prline2=None, snatched=None, sent_to=None, prov=None, module=None):
|
||||
if not mylar.PUSHBULLET_ENABLED:
|
||||
return
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[NOTIFIER]'
|
||||
|
||||
if snatched:
|
||||
if snatched[-1] == '.': snatched = snatched[:-1]
|
||||
|
@ -303,13 +320,13 @@ class PUSHBULLET:
|
|||
#logger.debug(u"PushBullet response body: %r" % response.read())
|
||||
|
||||
if request_status == 200:
|
||||
logger.fdebug(u"PushBullet notifications sent.")
|
||||
logger.fdebug(module + ' PushBullet notifications sent.')
|
||||
return True
|
||||
elif request_status >= 400 and request_status < 500:
|
||||
logger.error(u"PushBullet request failed: %s" % response.reason)
|
||||
logger.error(module + ' PushBullet request failed: %s' % response.reason)
|
||||
return False
|
||||
else:
|
||||
logger.error(u"PushBullet notification failed serverside.")
|
||||
logger.error(module + ' PushBullet notification failed serverside.')
|
||||
return False
|
||||
|
||||
def test(self, apikey, deviceid):
|
||||
|
|
|
@ -34,7 +34,7 @@ import email.utils
|
|||
import datetime
|
||||
from wsgiref.handlers import format_date_time
|
||||
|
||||
def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, IssueID, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=None, IssueArcID=None, mode=None, rsscheck=None, ComicID=None):
|
||||
def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, IssueID, AlternateSearch=None, UseFuzzy=None, ComicVersion=None, SARC=None, IssueArcID=None, mode=None, rsscheck=None, ComicID=None, manualsearch=None):
|
||||
if ComicYear == None: ComicYear = '2014'
|
||||
else: ComicYear = str(ComicYear)[:4]
|
||||
if Publisher == 'IDW Publishing': Publisher = 'IDW'
|
||||
|
@ -234,12 +234,12 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
|
|||
if findit == 'yes':
|
||||
return findit, searchprov
|
||||
else:
|
||||
logger.fdebug("Finished searching via : " + str(searchmode))
|
||||
if manualsearch is None:
|
||||
logger.info('Finished searching via :' + str(searchmode) + '. Issue not found - status kept as Wanted.')
|
||||
else:
|
||||
logger.info('Could not find issue doing a manual search via : ' + str(searchmode))
|
||||
i+=1
|
||||
|
||||
if findit == 'no':
|
||||
logger.info('Issue not found. Status kept as Wanted.')
|
||||
|
||||
return findit, 'None'
|
||||
|
||||
def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDate, StoreDate, nzbprov, prov_count, IssDateFix, IssueID, UseFuzzy, newznab_host=None, ComicVersion=None, SARC=None, IssueArcID=None, RSS=None, ComicID=None):
|
||||
|
@ -1210,11 +1210,16 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueDa
|
|||
logger.fdebug("link to retrieve via api:" + str(helpers.apiremove(linkapi,'$')))
|
||||
|
||||
#let's change all space to decimals for simplicity
|
||||
nzbname = re.sub(" ", ".", str(entry['title']))
|
||||
nzbname = re.sub('\s+',' ', entry['title']) #make sure we remove the extra spaces.
|
||||
logger.fdebug('[SEARCHER] entry[title]: ' + entry['title'])
|
||||
logger.fdebug('[SEARCHER] nzbname (\s): ' + nzbname)
|
||||
nzbname = re.sub(' ', '.', nzbname)
|
||||
logger.fdebug('[SEARCHER] nzbname (space to .): ' + nzbname)
|
||||
#gotta replace & or escape it
|
||||
nzbname = re.sub("\&", 'and', str(nzbname))
|
||||
nzbname = re.sub('[\,\:\?]', '', str(nzbname))
|
||||
nzbname = re.sub("\&", 'and', nzbname)
|
||||
nzbname = re.sub('[\,\:\?]', '', nzbname)
|
||||
extensions = ('.cbr', '.cbz')
|
||||
logger.fdebug('[SEARCHER] end nzbname: ' + nzbname)
|
||||
|
||||
if nzbname.lower().endswith(extensions):
|
||||
fd, ext = os.path.splitext(nzbname)
|
||||
|
|
100
mylar/updater.py
100
mylar/updater.py
|
@ -434,7 +434,7 @@ def nzblog(IssueID, NZBName, ComicName, SARC=None, IssueArcID=None):
|
|||
#print newValue
|
||||
myDB.upsert("nzblog", newValue, controlValue)
|
||||
|
||||
def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None, IssueArcID=None):
|
||||
def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None, IssueArcID=None, module=None):
|
||||
# When doing a Force Search (Wanted tab), the resulting search calls this to update.
|
||||
|
||||
# this is all redudant code that forceRescan already does.
|
||||
|
@ -442,10 +442,14 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
|
|||
# series directory, it just scans for the issue it just downloaded and
|
||||
# and change the status to Snatched accordingly. It is not to increment the have count
|
||||
# at this stage as it's not downloaded - just the .nzb has been snatched and sent to SAB.
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[UPDATER]'
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
logger.info('comicid: ' + str(ComicID))
|
||||
logger.info('issueid: ' + str(IssueID))
|
||||
logger.fdebug(module + ' comicid: ' + str(ComicID))
|
||||
logger.fdebug(module + ' issueid: ' + str(IssueID))
|
||||
if mode != 'story_arc':
|
||||
comic = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
|
||||
ComicName = comic['ComicName']
|
||||
|
@ -462,8 +466,8 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
|
|||
|
||||
if down is None:
|
||||
# update the status to Snatched (so it won't keep on re-downloading!)
|
||||
logger.fdebug('updating status to snatched')
|
||||
logger.fdebug('provider is ' + provider)
|
||||
logger.info(module + ' Updating status to snatched')
|
||||
logger.fdebug(module + ' Provider is ' + provider)
|
||||
newValue = {"Status": "Snatched"}
|
||||
if mode == 'story_arc':
|
||||
cValue = {"IssueArcID": IssueArcID}
|
||||
|
@ -510,13 +514,13 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
|
|||
"Status": "Snatched"
|
||||
}
|
||||
myDB.upsert("snatched", newsnatchValues, snatchedupdate)
|
||||
logger.info("updated the snatched.")
|
||||
logger.info(module + ' Updated the status (Snatched) complete for ' + ComicName + ' Issue: ' + str(IssueNum))
|
||||
else:
|
||||
if down == 'PP':
|
||||
logger.fdebug('setting status to Post-Processed in history.')
|
||||
logger.info(module + ' Setting status to Post-Processed in history.')
|
||||
downstatus = 'Post-Processed'
|
||||
else:
|
||||
logger.fdebug('setting status to Downloaded in history.')
|
||||
logger.info(module + ' Setting status to Downloaded in history.')
|
||||
downstatus = 'Downloaded'
|
||||
if mode == 'want_ann':
|
||||
IssueNum = "Annual " + issue['Issue_Number']
|
||||
|
@ -549,15 +553,17 @@ def foundsearch(ComicID, IssueID, mode=None, down=None, provider=None, SARC=None
|
|||
|
||||
myDB.upsert("issues", newValue, controlValue)
|
||||
|
||||
#print ("finished updating snatched db.")
|
||||
logger.info('Updating now complete for ' + ComicName + ' issue: ' + str(IssueNum))
|
||||
logger.info(module + ' Updating Status (' + downstatus + ') now complete for ' + ComicName + ' issue: ' + str(IssueNum))
|
||||
return
|
||||
|
||||
def forceRescan(ComicID,archive=None):
|
||||
def forceRescan(ComicID,archive=None,module=None):
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[FILE-RESCAN]'
|
||||
myDB = db.DBConnection()
|
||||
# file check to see if issue exists
|
||||
rescan = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [ComicID]).fetchone()
|
||||
logger.info('Now checking files for ' + rescan['ComicName'] + ' (' + str(rescan['ComicYear']) + ') in ' + rescan['ComicLocation'] )
|
||||
logger.info(module + ' Now checking files for ' + rescan['ComicName'] + ' (' + str(rescan['ComicYear']) + ') in ' + rescan['ComicLocation'] )
|
||||
if archive is None:
|
||||
fc = filechecker.listFiles(dir=rescan['ComicLocation'], watchcomic=rescan['ComicName'], Publisher=rescan['ComicPublisher'], AlternateSearch=rescan['AlternateSearch'])
|
||||
else:
|
||||
|
@ -587,8 +593,8 @@ def forceRescan(ComicID,archive=None):
|
|||
try:
|
||||
tmpfc = fc['comiclist'][fn]
|
||||
except IndexError:
|
||||
logger.fdebug('Unable to properly retrieve a file listing for the given series.')
|
||||
logger.fdebug('Probably because the filenames being scanned are not in a parseable format')
|
||||
logger.fdebug(module + ' Unable to properly retrieve a file listing for the given series.')
|
||||
logger.fdebug(module + ' Probably because the filenames being scanned are not in a parseable format')
|
||||
if fn == 0:
|
||||
return
|
||||
else:
|
||||
|
@ -597,12 +603,12 @@ def forceRescan(ComicID,archive=None):
|
|||
|
||||
# temploc = tmpfc['ComicFilename'].replace('_', ' ')
|
||||
temploc = re.sub('[\#\']', '', temploc)
|
||||
logger.fdebug('temploc: ' + str(temploc))
|
||||
logger.fdebug(module + ' temploc: ' + str(temploc))
|
||||
if 'annual' not in temploc.lower():
|
||||
#remove the extension here
|
||||
extensions = ('.cbr','.cbz')
|
||||
if temploc.lower().endswith(extensions):
|
||||
logger.fdebug('removed extension for issue: ' + str(temploc))
|
||||
logger.fdebug(module + ' Removed extension for issue: ' + str(temploc))
|
||||
temploc = temploc[:-4]
|
||||
# deccnt = str(temploc).count('.')
|
||||
# if deccnt > 1:
|
||||
|
@ -688,12 +694,12 @@ def forceRescan(ComicID,archive=None):
|
|||
#if the 'AU' is in 005AU vs 005 AU it will yield different results.
|
||||
fcnew[som] = fcnew[som] + 'AU'
|
||||
fcnew[som+1] = '93939999919190933'
|
||||
logger.info('AU Detected seperate from issue - combining and continuing')
|
||||
logger.info(module + ' AU Detected seperate from issue - combining and continuing')
|
||||
elif 'ai' in fcnew[som+1].lower():
|
||||
#if the 'AI' is in 005AI vs 005 AI it will yield different results.
|
||||
fcnew[som] = fcnew[som] + 'AI'
|
||||
fcnew[som+1] = '93939999919190933'
|
||||
logger.info('AI Detected seperate from issue - combining and continuing')
|
||||
logger.info(module + ' AI Detected seperate from issue - combining and continuing')
|
||||
|
||||
#sometimes scanners refuse to use spaces between () and lump the issue right at the start
|
||||
#mylar assumes it's all one word in this case..let's dump the brackets.
|
||||
|
@ -704,27 +710,27 @@ def forceRescan(ComicID,archive=None):
|
|||
#logger.fdebug("int_iss: " + str(int_iss))
|
||||
|
||||
if int(fcdigit) == int_iss:
|
||||
logger.fdebug('issue match - fcdigit: ' + str(fcdigit) + ' ... int_iss: ' + str(int_iss))
|
||||
logger.fdebug(module + ' Issue match - fcdigit: ' + str(fcdigit) + ' ... int_iss: ' + str(int_iss))
|
||||
|
||||
if '-' in temploc and temploc.find(reiss['Issue_Number']) > temploc.find('-'):
|
||||
logger.fdebug('I have detected a possible Title in the filename')
|
||||
logger.fdebug('the issue # has occured after the -, so I assume that it is part of the Title')
|
||||
logger.fdebug(module + ' I have detected a possible Title in the filename')
|
||||
logger.fdebug(module + ' the issue # has occured after the -, so I assume that it is part of the Title')
|
||||
break
|
||||
for d in issuedupechk:
|
||||
if int(d['fcdigit']) == int(fcdigit):
|
||||
logger.fdebug('duplicate issue detected - not counting this: ' + str(tmpfc['ComicFilename']))
|
||||
logger.fdebug('is a duplicate of ' + d['filename'])
|
||||
logger.fdebug(module + ' Duplicate issue detected - not counting this: ' + str(tmpfc['ComicFilename']))
|
||||
logger.fdebug(module + ' is a duplicate of ' + d['filename'])
|
||||
logger.fdebug('fcdigit:' + str(fcdigit) + ' === dupedigit: ' + str(d['fcdigit']))
|
||||
issuedupe = "yes"
|
||||
break
|
||||
if issuedupe == "no":
|
||||
logger.fdebug('matched...issue: ' + rescan['ComicName'] + '#' + str(reiss['Issue_Number']) + ' --- ' + str(int_iss))
|
||||
logger.fdebug(module + ' Matched...issue: ' + rescan['ComicName'] + '#' + str(reiss['Issue_Number']) + ' --- ' + str(int_iss))
|
||||
havefiles+=1
|
||||
haveissue = "yes"
|
||||
isslocation = str(tmpfc['ComicFilename'])
|
||||
issSize = str(tmpfc['ComicSize'])
|
||||
logger.fdebug('.......filename: ' + str(isslocation))
|
||||
logger.fdebug('.......filesize: ' + str(tmpfc['ComicSize']))
|
||||
logger.fdebug(module + ' .......filename: ' + str(isslocation))
|
||||
logger.fdebug(module + ' .......filesize: ' + str(tmpfc['ComicSize']))
|
||||
# to avoid duplicate issues which screws up the count...let's store the filename issues then
|
||||
# compare earlier...
|
||||
issuedupechk.append({'fcdigit': int(fcdigit),
|
||||
|
@ -735,7 +741,7 @@ def forceRescan(ComicID,archive=None):
|
|||
# determine a way to match on year if present, or no year (currently).
|
||||
|
||||
if issuedupe == "yes":
|
||||
logger.fdebug('I should break out here because of a dupe.')
|
||||
logger.fdebug(module + ' I should break out here because of a dupe.')
|
||||
break
|
||||
som+=1
|
||||
if haveissue == "yes" or issuedupe == "yes": break
|
||||
|
@ -775,23 +781,23 @@ def forceRescan(ComicID,archive=None):
|
|||
ann_iss = fcnew[som+1]
|
||||
logger.fdebug('Annual # ' + str(ann_iss) + ' detected.')
|
||||
fcdigit = helpers.issuedigits(ann_iss)
|
||||
logger.fdebug('fcdigit:' + str(fcdigit))
|
||||
logger.fdebug('int_iss:' + str(int_iss))
|
||||
logger.fdebug(module + ' fcdigit:' + str(fcdigit))
|
||||
logger.fdebug(module + ' int_iss:' + str(int_iss))
|
||||
if int(fcdigit) == int_iss:
|
||||
logger.fdebug('annual match - issue : ' + str(int_iss))
|
||||
logger.fdebug(module + ' Annual match - issue : ' + str(int_iss))
|
||||
for d in annualdupechk:
|
||||
if int(d['fcdigit']) == int(fcdigit):
|
||||
logger.fdebug('duplicate annual issue detected - not counting this: ' + str(tmpfc['ComicFilename']))
|
||||
logger.fdebug(module + ' Duplicate annual issue detected - not counting this: ' + str(tmpfc['ComicFilename']))
|
||||
issuedupe = "yes"
|
||||
break
|
||||
if issuedupe == "no":
|
||||
logger.fdebug('matched...annual issue: ' + rescan['ComicName'] + '#' + str(reann['Issue_Number']) + ' --- ' + str(int_iss))
|
||||
logger.fdebug(module + ' Matched...annual issue: ' + rescan['ComicName'] + '#' + str(reann['Issue_Number']) + ' --- ' + str(int_iss))
|
||||
havefiles+=1
|
||||
haveissue = "yes"
|
||||
isslocation = str(tmpfc['ComicFilename'])
|
||||
issSize = str(tmpfc['ComicSize'])
|
||||
logger.fdebug('.......filename: ' + str(isslocation))
|
||||
logger.fdebug('.......filesize: ' + str(tmpfc['ComicSize']))
|
||||
logger.fdebug(module + ' .......filename: ' + str(isslocation))
|
||||
logger.fdebug(module + ' .......filesize: ' + str(tmpfc['ComicSize']))
|
||||
# to avoid duplicate issues which screws up the count...let's store the filename issues then
|
||||
# compare earlier...
|
||||
annualdupechk.append({'fcdigit': int(fcdigit)})
|
||||
|
@ -809,7 +815,7 @@ def forceRescan(ComicID,archive=None):
|
|||
if mylar.ANNUALS_ON:
|
||||
if 'annual' in temploc.lower():
|
||||
if reann is None:
|
||||
logger.fdebug('Annual present in location, but series does not have any annuals attached to it - Ignoring')
|
||||
logger.fdebug(module + ' Annual present in location, but series does not have any annuals attached to it - Ignoring')
|
||||
writeit = False
|
||||
else:
|
||||
iss_id = reann['IssueID']
|
||||
|
@ -817,13 +823,13 @@ def forceRescan(ComicID,archive=None):
|
|||
iss_id = reiss['IssueID']
|
||||
else:
|
||||
if 'annual' in temploc.lower():
|
||||
logger.fdebug('Annual support not enabled, but annual issue present within directory. Ignoring annual.')
|
||||
logger.fdebug(module + ' Annual support not enabled, but annual issue present within directory. Ignoring annual.')
|
||||
writeit = False
|
||||
else:
|
||||
iss_id = reiss['IssueID']
|
||||
|
||||
if writeit == True:
|
||||
logger.fdebug('issueID to write to db:' + str(iss_id))
|
||||
logger.fdebug(module + ' issueID to write to db:' + str(iss_id))
|
||||
controlValueDict = {"IssueID": iss_id}
|
||||
|
||||
#if Archived, increase the 'Have' count.
|
||||
|
@ -856,7 +862,7 @@ def forceRescan(ComicID,archive=None):
|
|||
# logger.info('writing ' + str(iss))
|
||||
# writethis = myDB.upsert(iss['tableName'], iss['valueDict'], iss['keyDict'])
|
||||
|
||||
logger.fdebug('IssueID to ignore: ' + str(issID_to_ignore))
|
||||
logger.fdebug(module + ' IssueID to ignore: ' + str(issID_to_ignore))
|
||||
|
||||
#here we need to change the status of the ones we DIDN'T FIND above since the loop only hits on FOUND issues.
|
||||
update_iss = []
|
||||
|
@ -900,9 +906,9 @@ def forceRescan(ComicID,archive=None):
|
|||
newStatusValue = {"Status": ui['Status']}
|
||||
myDB.upsert("issues", newStatusValue, controlValueDict)
|
||||
i+=1
|
||||
logger.info('Updated the status of ' + str(i) + ' issues for ' + rescan['ComicName'] + ' (' + str(rescan['ComicYear']) + ') that were not found.')
|
||||
logger.info(module + ' Updated the status of ' + str(i) + ' issues for ' + rescan['ComicName'] + ' (' + str(rescan['ComicYear']) + ') that were not found.')
|
||||
|
||||
logger.info('Total files located: ' + str(havefiles))
|
||||
logger.info(module + ' Total files located: ' + str(havefiles))
|
||||
foundcount = havefiles
|
||||
arcfiles = 0
|
||||
arcanns = 0
|
||||
|
@ -918,7 +924,7 @@ def forceRescan(ComicID,archive=None):
|
|||
if arcfiles > 0 or arcanns > 0:
|
||||
arcfiles = arcfiles + arcanns
|
||||
havefiles = havefiles + arcfiles
|
||||
logger.fdebug('Adjusting have total to ' + str(havefiles) + ' because of this many archive files:' + str(arcfiles))
|
||||
logger.fdebug(module + ' Adjusting have total to ' + str(havefiles) + ' because of this many archive files:' + str(arcfiles))
|
||||
|
||||
ignorecount = 0
|
||||
if mylar.IGNORE_HAVETOTAL: # if this is enabled, will increase Have total as if in Archived Status
|
||||
|
@ -926,7 +932,7 @@ def forceRescan(ComicID,archive=None):
|
|||
if int(ignores[0][0]) > 0:
|
||||
ignorecount = ignores[0][0]
|
||||
havefiles = havefiles + ignorecount
|
||||
logger.fdebug('Adjusting have total to ' + str(havefiles) + ' because of this many Ignored files:' + str(ignorecount))
|
||||
logger.fdebug(module + ' Adjusting have total to ' + str(havefiles) + ' because of this many Ignored files:' + str(ignorecount))
|
||||
|
||||
#now that we are finished...
|
||||
#adjust for issues that have been marked as Downloaded, but aren't found/don't exist.
|
||||
|
@ -945,7 +951,7 @@ def forceRescan(ComicID,archive=None):
|
|||
#print ("comiclocation: " + str(rescan['ComicLocation']))
|
||||
#print ("downlocation: " + str(down['Location']))
|
||||
if down['Location'] is None:
|
||||
logger.fdebug('location does not exist which means file was not downloaded successfully, or was moved.')
|
||||
logger.fdebug(module + ' Location does not exist which means file was not downloaded successfully, or was moved.')
|
||||
controlValue = {"IssueID": down['IssueID']}
|
||||
newValue = {"Status": "Archived"}
|
||||
myDB.upsert("issues", newValue, controlValue)
|
||||
|
@ -964,9 +970,9 @@ def forceRescan(ComicID,archive=None):
|
|||
archivedissues+=1
|
||||
totalarc = arcfiles + archivedissues
|
||||
havefiles = havefiles + archivedissues #arcfiles already tallied in havefiles in above segment
|
||||
logger.fdebug('arcfiles : ' + str(arcfiles))
|
||||
logger.fdebug('havefiles: ' + str(havefiles))
|
||||
logger.fdebug('I have changed the status of ' + str(archivedissues) + ' issues to a status of Archived, as I now cannot locate them in the series directory.')
|
||||
logger.fdebug(module + ' arcfiles : ' + str(arcfiles))
|
||||
logger.fdebug(module + ' havefiles: ' + str(havefiles))
|
||||
logger.fdebug(module + ' I have changed the status of ' + str(archivedissues) + ' issues to a status of Archived, as I now cannot locate them in the series directory.')
|
||||
|
||||
|
||||
#let's update the total count of comics that was found.
|
||||
|
@ -977,6 +983,6 @@ def forceRescan(ComicID,archive=None):
|
|||
combined_total = rescan['Total'] + anncnt
|
||||
|
||||
myDB.upsert("comics", newValueStat, controlValueStat)
|
||||
logger.info('I have physically found ' + str(foundcount) + ' issues, ignored ' + str(ignorecount) + ' issues, and accounted for ' + str(totalarc) + ' in an Archived state. Total Issue Count: ' + str(havefiles) + ' / ' + str(combined_total))
|
||||
logger.info(module + ' I have physically found ' + str(foundcount) + ' issues, ignored ' + str(ignorecount) + ' issues, and accounted for ' + str(totalarc) + ' in an Archived state. Total Issue Count: ' + str(havefiles) + ' / ' + str(combined_total))
|
||||
|
||||
return
|
||||
|
|
|
@ -802,12 +802,15 @@ class WebInterface(object):
|
|||
Publisher = miy['ComicPublisher']
|
||||
UseAFuzzy = miy['UseFuzzy']
|
||||
ComicVersion = miy['ComicVersion']
|
||||
foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, Publisher, issues['IssueDate'], storedate, IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID)
|
||||
foundcom, prov = search.search_init(ComicName, ComicIssue, ComicYear, SeriesYear, Publisher, issues['IssueDate'], storedate, IssueID, AlternateSearch, UseAFuzzy, ComicVersion, mode=mode, ComicID=ComicID, manualsearch=manualsearch)
|
||||
if foundcom == "yes":
|
||||
# file check to see if issue exists and update 'have' count
|
||||
if IssueID is not None:
|
||||
logger.info("passing to updater.")
|
||||
return updater.foundsearch(ComicID, IssueID, mode=mode, provider=prov)
|
||||
if manualsearch:
|
||||
# if it's a manual search, return to null here so the thread will die and not cause http redirect errors.
|
||||
return
|
||||
if ComicID:
|
||||
raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
|
||||
else:
|
||||
|
@ -2437,6 +2440,7 @@ class WebInterface(object):
|
|||
"cmtagger_path" : mylar.CMTAGGER_PATH,
|
||||
"ct_tag_cr" : helpers.checked(mylar.CT_TAG_CR),
|
||||
"ct_tag_cbl" : helpers.checked(mylar.CT_TAG_CBL),
|
||||
"ct_cbz_overwrite" : helpers.checked(mylar.CT_CBZ_OVERWRITE),
|
||||
"branch" : version.MYLAR_VERSION,
|
||||
"br_type" : mylar.INSTALL_TYPE,
|
||||
"br_version" : mylar.versioncheck.getVersion(),
|
||||
|
@ -2618,7 +2622,7 @@ class WebInterface(object):
|
|||
nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
|
||||
usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_uid=None, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
|
||||
raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0,
|
||||
enable_meta=0, cmtagger_path=None, ct_tag_cr=0, ct_tag_cbl=0, enable_rss=0, rss_checkinterval=None, enable_torrent_search=0, enable_kat=0, enable_cbt=0, cbt_passkey=None, snatchedtorrent_notify=0,
|
||||
enable_meta=0, cmtagger_path=None, ct_tag_cr=0, ct_tag_cbl=0, ct_cbz_overwrite=0, enable_rss=0, rss_checkinterval=None, enable_torrent_search=0, enable_kat=0, enable_cbt=0, cbt_passkey=None, snatchedtorrent_notify=0,
|
||||
enable_torrents=0, minseeds=0, torrent_local=0, local_watchdir=None, torrent_seedbox=0, seedbox_watchdir=None, seedbox_user=None, seedbox_pass=None, seedbox_host=None, seedbox_port=None,
|
||||
prowl_enabled=0, prowl_onsnatch=0, prowl_keys=None, prowl_priority=None, nma_enabled=0, nma_apikey=None, nma_priority=0, nma_onsnatch=0, pushover_enabled=0, pushover_onsnatch=0, pushover_apikey=None, pushover_userkey=None, pushover_priority=None, boxcar_enabled=0, boxcar_onsnatch=0, boxcar_token=None,
|
||||
pushbullet_enabled=0, pushbullet_apikey=None, pushbullet_deviceid=None, pushbullet_onsnatch=0,
|
||||
|
@ -2746,6 +2750,7 @@ class WebInterface(object):
|
|||
mylar.CMTAGGER_PATH = cmtagger_path
|
||||
mylar.CT_TAG_CR = ct_tag_cr
|
||||
mylar.CT_TAG_CBL = ct_tag_cbl
|
||||
mylar.CT_CBZ_OVERWRITE = ct_cbz_overwrite
|
||||
mylar.LOG_DIR = log_dir
|
||||
mylar.LOG_LEVEL = log_level
|
||||
mylar.CHMOD_DIR = chmod_dir
|
||||
|
|
|
@ -843,7 +843,10 @@ def checkthis(datecheck,datestatus,usedate):
|
|||
|
||||
return valid_check
|
||||
|
||||
def weekly_singlecopy(comicid, issuenum, file, path):
|
||||
def weekly_singlecopy(comicid, issuenum, file, path, module=None):
|
||||
if module is None:
|
||||
module = ''
|
||||
module += '[WEEKLY-PULL]'
|
||||
myDB = db.DBConnection()
|
||||
try:
|
||||
pull_date = myDB.selectone("SELECT SHIPDATE from weekly").fetchone()
|
||||
|
@ -852,22 +855,22 @@ def weekly_singlecopy(comicid, issuenum, file, path):
|
|||
else:
|
||||
pulldate = pull_date['SHIPDATE']
|
||||
|
||||
logger.fdebug(u"Weekly pull list detected as : " + str(pulldate))
|
||||
logger.fdebug(module + ' Weekly pull list detected as : ' + str(pulldate))
|
||||
|
||||
except (sqlite3.OperationalError, TypeError),msg:
|
||||
logger.info(u"Error determining current weekly pull-list date - you should refresh the pull-list manually probably.")
|
||||
logger.info(module + ' Error determining current weekly pull-list date - you should refresh the pull-list manually probably.')
|
||||
return
|
||||
|
||||
chkit = myDB.selectone('SELECT * FROM weekly WHERE ComicID=? AND ISSUE=?',[comicid, issuenum]).fetchone()
|
||||
if chkit is None:
|
||||
logger.fdebug(file + ' is not on the weekly pull-list or it is a one-off download that is not supported as of yet.')
|
||||
logger.fdebug(module + ' ' + file + ' is not on the weekly pull-list or it is a one-off download that is not supported as of yet.')
|
||||
return
|
||||
|
||||
logger.info('issue on weekly pull-list.')
|
||||
logger.info(module + ' Issue found on weekly pull-list.')
|
||||
|
||||
if mylar.WEEKFOLDER:
|
||||
desdir = os.path.join(mylar.DESTINATION_DIR, pulldate)
|
||||
dircheck = mylar.filechecker.validateAndCreateDirectory(desdir, True)
|
||||
dircheck = mylar.filechecker.validateAndCreateDirectory(desdir, True, module=module)
|
||||
if dircheck:
|
||||
pass
|
||||
else:
|
||||
|
@ -882,9 +885,9 @@ def weekly_singlecopy(comicid, issuenum, file, path):
|
|||
try:
|
||||
shutil.copy2(srcfile, desfile)
|
||||
except IOError as e:
|
||||
logger.error('Could not copy ' + str(srcfile) + ' to ' + str(desfile))
|
||||
logger.error(module + ' Could not copy ' + str(srcfile) + ' to ' + str(desfile))
|
||||
return
|
||||
|
||||
logger.info('[WEEKLY-PULL] Sucessfully copied to ' + desfile.encode('utf-8').strip() )
|
||||
logger.info(module + ' Sucessfully copied to ' + desfile.encode('utf-8').strip() )
|
||||
return
|
||||
|
||||
|
|
Loading…
Reference in New Issue