2012-10-30 10:43:01 +00:00
# This file is part of Mylar.
#
# Mylar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mylar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mylar. If not, see <http://www.gnu.org/licenses/>.
2012-12-27 15:04:03 +00:00
from __future__ import with_statement

import logging
import os
import re
import shlex
import shutil
import subprocess
import sqlite3
import time
import urllib2
from xml.dom.minidom import parseString

import mylar
from mylar import logger, db, helpers, updater, notifiers, filechecker
2012-12-27 15:04:03 +00:00
class PostProcessor(object):
    """
    A class which will process a media file according to the post processing
    settings in the config.
    """

    # return codes for comparing an existing file against a new download
    EXISTS_LARGER = 1
    EXISTS_SAME = 2
    EXISTS_SMALLER = 3
    DOESNT_EXIST = 4

    # IGNORED_FILESTRINGS = [ "" ]

    # identifiers for where a parsed name originated from
    NZB_NAME = 1
    FOLDER_NAME = 2
    FILE_NAME = 3
def __init__ ( self , nzb_name , nzb_folder ) :
"""
Creates a new post processor with the given file path and optionally an NZB name .
file_path : The path to the file to be processed
nzb_name : The name of the NZB which resulted in this file being downloaded ( optional )
"""
# absolute path to the folder that is being processed
#self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))
# full path to file
#self.file_path = file_path
# file name only
#self.file_name = ek.ek(os.path.basename, file_path)
# the name of the folder only
#self.folder_name = ek.ek(os.path.basename, self.folder_path)
# name of the NZB that resulted in this folder
self . nzb_name = nzb_name
self . nzb_folder = nzb_folder
#self.in_history = False
#self.release_group = None
#self.is_proper = False
self . log = ' '
def _log ( self , message , level = logger . MESSAGE ) :
"""
A wrapper for the internal logger which also keeps track of messages and saves them to a string for $
message : The string to log ( unicode )
level : The log level to use ( optional )
"""
# logger.log(message, level)
self . log + = message + ' \n '
2013-01-13 17:10:41 +00:00
def _run_pre_scripts ( self , nzb_name , nzb_folder , seriesmetadata ) :
"""
Executes any pre scripts defined in the config .
ep_obj : The object to use when calling the pre script
"""
self . _log ( " initiating pre script detection. " , logger . DEBUG )
self . _log ( " mylar.PRE_SCRIPTS : " + mylar . PRE_SCRIPTS , logger . DEBUG )
# for currentScriptName in mylar.PRE_SCRIPTS:
2013-02-27 08:28:40 +00:00
currentScriptName = str ( mylar . PRE_SCRIPTS ) . decode ( " string_escape " )
2013-01-13 17:10:41 +00:00
self . _log ( " pre script detected...enabling: " + str ( currentScriptName ) , logger . DEBUG )
# generate a safe command line string to execute the script and provide all the parameters
2013-02-06 19:55:23 +00:00
script_cmd = shlex . split ( currentScriptName , posix = False ) + [ str ( nzb_name ) , str ( nzb_folder ) , str ( seriesmetadata ) ]
2013-01-13 17:10:41 +00:00
self . _log ( " cmd to be executed: " + str ( script_cmd ) , logger . DEBUG )
# use subprocess to run the command and capture output
self . _log ( u " Executing command " + str ( script_cmd ) )
self . _log ( u " Absolute path to script: " + script_cmd [ 0 ] , logger . DEBUG )
try :
p = subprocess . Popen ( script_cmd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , cwd = mylar . PROG_DIR )
out , err = p . communicate ( ) #@UnusedVariable
self . _log ( u " Script result: " + str ( out ) , logger . DEBUG )
except OSError , e :
2013-03-04 16:16:40 +00:00
self . _log ( u " Unable to run pre_script: " + str ( script_cmd ) )
2013-01-13 17:10:41 +00:00
2013-01-07 18:26:15 +00:00
def _run_extra_scripts ( self , nzb_name , nzb_folder , filen , folderp , seriesmetadata ) :
2012-12-27 15:04:03 +00:00
"""
Executes any extra scripts defined in the config .
ep_obj : The object to use when calling the extra script
"""
self . _log ( " initiating extra script detection. " , logger . DEBUG )
self . _log ( " mylar.EXTRA_SCRIPTS : " + mylar . EXTRA_SCRIPTS , logger . DEBUG )
# for curScriptName in mylar.EXTRA_SCRIPTS:
2013-01-29 09:02:23 +00:00
curScriptName = str ( mylar . EXTRA_SCRIPTS ) . decode ( " string_escape " )
2012-12-27 15:04:03 +00:00
self . _log ( " extra script detected...enabling: " + str ( curScriptName ) , logger . DEBUG )
# generate a safe command line string to execute the script and provide all the parameters
2013-01-07 18:26:15 +00:00
script_cmd = shlex . split ( curScriptName ) + [ str ( nzb_name ) , str ( nzb_folder ) , str ( filen ) , str ( folderp ) , str ( seriesmetadata ) ]
2012-12-27 15:04:03 +00:00
self . _log ( " cmd to be executed: " + str ( script_cmd ) , logger . DEBUG )
# use subprocess to run the command and capture output
self . _log ( u " Executing command " + str ( script_cmd ) )
self . _log ( u " Absolute path to script: " + script_cmd [ 0 ] , logger . DEBUG )
try :
p = subprocess . Popen ( script_cmd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , cwd = mylar . PROG_DIR )
out , err = p . communicate ( ) #@UnusedVariable
self . _log ( u " Script result: " + str ( out ) , logger . DEBUG )
except OSError , e :
self . _log ( u " Unable to run extra_script: " + str ( script_cmd ) )
def Process ( self ) :
self . _log ( " nzb name: " + str ( self . nzb_name ) , logger . DEBUG )
self . _log ( " nzb folder: " + str ( self . nzb_folder ) , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " nzb name: " + str ( self . nzb_name ) )
logger . fdebug ( " nzb folder: " + str ( self . nzb_folder ) )
2013-03-04 16:16:40 +00:00
if mylar . USE_SABNZBD == 0 :
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
logger . fdebug ( " Not using SABnzbd " )
elif mylar . USE_SABNZBD != 0 and self . nzb_name == ' Manual Run ' :
logger . fdebug ( ' Not using SABnzbd : Manual Run ' )
2013-03-04 16:16:40 +00:00
else :
# if the SAB Directory option is enabled, let's use that folder name and append the jobname.
if mylar . SAB_DIRECTORY is not None and mylar . SAB_DIRECTORY is not ' None ' and len ( mylar . SAB_DIRECTORY ) > 4 :
self . nzb_folder = os . path . join ( mylar . SAB_DIRECTORY , self . nzb_name ) . encode ( mylar . SYS_ENCODING )
#lookup nzb_name in nzblog table to get issueid
#query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
#http://localhost:8080/sabnzbd/api?mode=set_config§ion=misc&keyword=dirscan_speed&value=5
querysab = str ( mylar . SAB_HOST ) + " /api?mode=get_config§ion=misc&output=xml&apikey= " + str ( mylar . SAB_APIKEY )
#logger.info("querysab_string:" + str(querysab))
file = urllib2 . urlopen ( querysab )
data = file . read ( )
file . close ( )
dom = parseString ( data )
2013-05-15 09:04:26 +00:00
try :
sabreps = dom . getElementsByTagName ( ' replace_spaces ' ) [ 0 ] . firstChild . wholeText
except :
errorm = dom . getElementsByTagName ( ' error ' ) [ 0 ] . firstChild . wholeText
logger . error ( u " Error detected attempting to retrieve SAB data : " + errorm )
return
2013-03-04 16:16:40 +00:00
sabrepd = dom . getElementsByTagName ( ' replace_dots ' ) [ 0 ] . firstChild . wholeText
logger . fdebug ( " SAB Replace Spaces: " + str ( sabreps ) )
logger . fdebug ( " SAB Replace Dots: " + str ( sabrepd ) )
if mylar . USE_NZBGET == 1 :
logger . fdebug ( " Using NZBGET " )
logger . fdebug ( " NZB name as passed from NZBGet: " + self . nzb_name )
2012-12-27 15:04:03 +00:00
myDB = db . DBConnection ( )
2013-01-07 18:26:15 +00:00
2013-07-01 05:19:15 +00:00
if self . nzb_name == ' Manual Run ' :
2014-01-16 20:25:02 +00:00
logger . fdebug ( " manual run initiated " )
2013-07-01 05:19:15 +00:00
#Manual postprocessing on a folder.
#use the nzb_folder to determine every file
#walk the dir,
#once a series name and issue are matched,
#write the series/issue/filename to a tuple
#when all done, iterate over the tuple until completion...
comicseries = myDB . action ( " SELECT * FROM comics " )
manual_list = [ ]
if comicseries is None :
logger . error ( u " No Series in Watchlist - aborting Manual Post Processing. Maybe you should be running Import? " )
return
else :
ccnt = 0
nm = 0
2013-08-21 02:33:57 +00:00
watchvals = { }
2013-07-01 05:19:15 +00:00
for cs in comicseries :
2013-08-21 02:33:57 +00:00
watchvals = { " SeriesYear " : cs [ ' ComicYear ' ] ,
2013-09-18 04:49:24 +00:00
" LatestDate " : cs [ ' LatestDate ' ] ,
" ComicVersion " : cs [ ' ComicVersion ' ] ,
2013-08-21 02:33:57 +00:00
" Total " : cs [ ' Total ' ] }
watchmatch = filechecker . listFiles ( self . nzb_folder , cs [ ' ComicName ' ] , cs [ ' AlternateSearch ' ] , manual = watchvals )
2014-01-16 20:25:02 +00:00
if watchmatch [ ' comiccount ' ] == 0 : # is None:
2013-07-01 05:19:15 +00:00
nm + = 1
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config, FIX: If Annuals not enabled, would error on home screen, IMP: updated mylar.init.d (thnx Kalinon), FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon), IMP: Library Monitor working (check folder every X minutes and Post-Process), IMP: Future Upcoming introduction, IMP: Experimental search better handling of year inclusions, FIX: Filechecker will now pick up series with years in the series title accordingly, FIX: Torrent seedbox sending would lockup occassionally when attempting to send torrent file, FIX: malformed image url on some series, IMP: Moved issue updating to a seperate function, IMP: When series was refreshed, would download the last issue (or few issues depending on date), regardless of status, IMP: When series is volume 1 or volume label doesn't exist, either assume V1 or remove volume requirements to improve matching hits, IMP: StoryArcs will now check in StoryArc folder for existing issues and change status in StoryArc accordingly...
2013-11-28 15:48:59 +00:00
continue
2013-07-01 05:19:15 +00:00
else :
fn = 0
fccnt = int ( watchmatch [ ' comiccount ' ] )
2013-08-11 05:31:41 +00:00
if len ( watchmatch ) == 1 : continue
2013-07-01 05:19:15 +00:00
while ( fn < fccnt ) :
try :
tmpfc = watchmatch [ ' comiclist ' ] [ fn ]
2013-08-11 05:31:41 +00:00
except IndexError , KeyError :
2013-07-01 05:19:15 +00:00
break
temploc = tmpfc [ ' JusttheDigits ' ] . replace ( ' _ ' , ' ' )
temploc = re . sub ( ' [ \ # \' ] ' , ' ' , temploc )
logger . fdebug ( " temploc: " + str ( temploc ) )
ww = shlex . split ( temploc )
lnw = len ( ww )
wdc = 0
while ( wdc < lnw ) :
#counts get buggered up when the issue is the last field in the filename - ie. '50.cbr'
if " .cbr " in ww [ wdc ] . lower ( ) :
ww [ wdc ] = ww [ wdc ] . replace ( " .cbr " , " " )
elif " .cbz " in ww [ wdc ] . lower ( ) :
ww [ wdc ] = ww [ wdc ] . replace ( " .cbz " , " " )
if " (c2c) " in ww [ wdc ] . lower ( ) :
ww [ wdc ] = ww [ wdc ] . replace ( " (c2c) " , " " )
get_issue = shlex . split ( str ( ww [ wdc ] ) )
if ww [ wdc ] != " " :
ww [ wdc ] = get_issue [ 0 ]
if ' . ' in ww [ wdc ] :
#logger.fdebug("decimal detected...adjusting.")
try :
i = float ( ww [ wdc ] )
except ValueError , TypeError :
#not numeric
#logger.fdebug("NOT NUMERIC - new word: " + str(ww[wdc]))
ww [ wdc ] = ww [ wdc ] . replace ( " . " , " " )
else :
#numeric
pass
if ww [ wdc ] . isdigit ( ) :
if int ( ww [ wdc ] ) > 0 :
if wdc + 1 < len ( ww ) and ' au ' in ww [ wdc + 1 ] . lower ( ) :
if len ( ww [ wdc + 1 ] ) == 2 :
#if the 'AU' is in 005AU vs 005 AU it will yield different results.
ww [ wdc ] = ww [ wdc ] + ' AU '
ww [ wdc + 1 ] = ' 93939999919190933 '
logger . info ( " AU Detected seperate from issue - combining and continuing " )
fcdigit = helpers . issuedigits ( ww [ wdc ] )
if ' annual ' in self . nzb_name . lower ( ) :
logger . info ( " annual detected. " )
annchk = " yes "
issuechk = myDB . action ( " SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=? " , [ cs [ ' ComicID ' ] , fcdigit ] ) . fetchone ( )
else :
issuechk = myDB . action ( " SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=? " , [ cs [ ' ComicID ' ] , fcdigit ] ) . fetchone ( )
if issuechk is None :
logger . info ( " No corresponding issue # found for " + str ( cs [ ' ComicID ' ] ) )
else :
logger . info ( " Found matching issue # " + str ( fcdigit ) + " for ComicID: " + str ( cs [ ' ComicID ' ] ) + " / IssueID: " + str ( issuechk [ ' IssueID ' ] ) )
manual_list . append ( { " ComicLocation " : tmpfc [ ' ComicLocation ' ] ,
" ComicID " : cs [ ' ComicID ' ] ,
" IssueID " : issuechk [ ' IssueID ' ] ,
" IssueNumber " : issuechk [ ' Issue_Number ' ] ,
" ComicName " : cs [ ' ComicName ' ] } )
ccnt + = 1
2014-01-16 20:25:02 +00:00
#print manual_list
2013-07-01 05:19:15 +00:00
wdc + = 1
fn + = 1
2014-01-16 20:25:02 +00:00
logger . fdebug ( " There are " + str ( len ( manual_list ) ) + " files found that match on your watchlist, " + str ( nm ) + " do not match anything and will be ignored. " )
2013-07-01 05:19:15 +00:00
2013-01-08 04:24:00 +00:00
2013-07-01 05:19:15 +00:00
else :
nzbname = self . nzb_name
#remove extensions from nzb_name if they somehow got through (Experimental most likely)
extensions = ( ' .cbr ' , ' .cbz ' )
if nzbname . lower ( ) . endswith ( extensions ) :
fd , ext = os . path . splitext ( nzbname )
self . _log ( " Removed extension from nzb: " + ext , logger . DEBUG )
nzbname = re . sub ( str ( ext ) , ' ' , str ( nzbname ) )
#replace spaces
nzbname = re . sub ( ' ' , ' . ' , str ( nzbname ) )
nzbname = re . sub ( ' [ \ , \ : \ ?] ' , ' ' , str ( nzbname ) )
nzbname = re . sub ( ' [ \ &] ' , ' and ' , str ( nzbname ) )
logger . fdebug ( " After conversions, nzbname is : " + str ( nzbname ) )
# if mylar.USE_NZBGET==1:
# nzbname=self.nzb_name
self . _log ( " nzbname: " + str ( nzbname ) , logger . DEBUG )
2013-01-07 18:26:15 +00:00
nzbiss = myDB . action ( " SELECT * from nzblog WHERE nzbname=? " , [ nzbname ] ) . fetchone ( )
2013-07-01 05:19:15 +00:00
2013-01-07 18:26:15 +00:00
if nzbiss is None :
2013-07-01 05:19:15 +00:00
self . _log ( " Failure - could not initially locate nzbfile in my database to rename. " , logger . DEBUG )
logger . fdebug ( " Failure - could not locate nzbfile initially. " )
# if failed on spaces, change it all to decimals and try again.
nzbname = re . sub ( ' _ ' , ' . ' , str ( nzbname ) )
self . _log ( " trying again with this nzbname: " + str ( nzbname ) , logger . DEBUG )
logger . fdebug ( " trying again with nzbname of : " + str ( nzbname ) )
nzbiss = myDB . action ( " SELECT * from nzblog WHERE nzbname=? " , [ nzbname ] ) . fetchone ( )
if nzbiss is None :
logger . error ( u " Unable to locate downloaded file to rename. PostProcessing aborted. " )
return
else :
self . _log ( " I corrected and found the nzb as : " + str ( nzbname ) )
logger . fdebug ( " auto-corrected and found the nzb as : " + str ( nzbname ) )
issueid = nzbiss [ ' IssueID ' ]
else :
2013-01-07 18:26:15 +00:00
issueid = nzbiss [ ' IssueID ' ]
2013-07-01 05:19:15 +00:00
logger . fdebug ( " issueid: " + str ( issueid ) )
sarc = nzbiss [ ' SARC ' ]
#use issueid to get publisher, series, year, issue number
2013-07-30 04:57:37 +00:00
2013-07-01 05:19:15 +00:00
annchk = " no "
if ' annual ' in nzbname . lower ( ) :
logger . info ( " annual detected. " )
annchk = " yes "
2013-07-30 04:57:37 +00:00
issuenzb = myDB . action ( " SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL " , [ issueid ] ) . fetchone ( )
2013-07-01 05:19:15 +00:00
else :
2013-07-30 04:57:37 +00:00
issuenzb = myDB . action ( " SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL " , [ issueid ] ) . fetchone ( )
2013-07-01 05:19:15 +00:00
if issuenzb is not None :
2013-07-30 04:57:37 +00:00
logger . info ( " issuenzb found. " )
2013-07-01 05:19:15 +00:00
if helpers . is_number ( issueid ) :
sandwich = int ( issuenzb [ ' IssueID ' ] )
else :
2013-07-30 04:57:37 +00:00
logger . info ( " issuenzb not found. " )
2013-07-01 05:19:15 +00:00
#if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume
#using GCD data. Set sandwich to 1 so it will bypass and continue post-processing.
if ' S ' in issueid :
sandwich = issueid
elif ' G ' in issueid or ' - ' in issueid :
sandwich = 1
if helpers . is_number ( sandwich ) :
if sandwich < 900000 :
# if sandwich is less than 900000 it's a normal watchlist download. Bypass.
pass
else :
if issuenzb is None or ' S ' in sandwich or int ( sandwich ) > = 900000 :
# this has no issueID, therefore it's a one-off or a manual post-proc.
# At this point, let's just drop it into the Comic Location folder and forget about it..
if ' S ' in sandwich :
self . _log ( " One-off STORYARC mode enabled for Post-Processing for " + str ( sarc ) )
logger . info ( " One-off STORYARC mode enabled for Post-Processing for " + str ( sarc ) )
if mylar . STORYARCDIR :
storyarcd = os . path . join ( mylar . DESTINATION_DIR , " StoryArcs " , sarc )
self . _log ( " StoryArc Directory set to : " + storyarcd , logger . DEBUG )
else :
self . _log ( " Grab-Bag Directory set to : " + mylar . GRABBAG_DIR , logger . DEBUG )
2013-05-25 06:18:00 +00:00
else :
2013-07-01 05:19:15 +00:00
self . _log ( " One-off mode enabled for Post-Processing. All I ' m doing is moving the file untouched into the Grab-bag directory. " , logger . DEBUG )
logger . info ( " One-off mode enabled for Post-Processing. Will move into Grab-bag directory. " )
2013-05-25 06:18:00 +00:00
self . _log ( " Grab-Bag Directory set to : " + mylar . GRABBAG_DIR , logger . DEBUG )
2013-07-01 05:19:15 +00:00
for root , dirnames , filenames in os . walk ( self . nzb_folder ) :
for filename in filenames :
if filename . lower ( ) . endswith ( extensions ) :
ofilename = filename
path , ext = os . path . splitext ( ofilename )
2013-05-25 06:18:00 +00:00
2013-07-01 05:19:15 +00:00
if ' S ' in sandwich :
if mylar . STORYARCDIR :
grdst = storyarcd
else :
grdst = mylar . DESTINATION_DIR
2013-05-25 06:18:00 +00:00
else :
2013-07-01 05:19:15 +00:00
if mylar . GRABBAG_DIR :
grdst = mylar . GRABBAG_DIR
else :
grdst = mylar . DESTINATION_DIR
filechecker . validateAndCreateDirectory ( grdst , True )
2013-05-25 06:18:00 +00:00
2013-07-01 05:19:15 +00:00
if ' S ' in sandwich :
#if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
if mylar . READ2FILENAME :
issuearcid = re . sub ( ' S ' , ' ' , issueid )
2013-09-18 04:49:24 +00:00
logger . fdebug ( ' issuearcid: ' + str ( issuearcid ) )
arcdata = myDB . action ( " SELECT * FROM readinglist WHERE IssueArcID=? " , [ issuearcid ] ) . fetchone ( )
logger . fdebug ( ' readingorder#: ' + str ( arcdata [ ' ReadingOrder ' ] ) )
2013-07-01 05:19:15 +00:00
if int ( arcdata [ ' ReadingOrder ' ] ) < 10 : readord = " 00 " + str ( arcdata [ ' ReadingOrder ' ] )
elif int ( arcdata [ ' ReadingOrder ' ] ) > 10 and int ( arcdata [ ' ReadingOrder ' ] ) < 99 : readord = " 0 " + str ( arcdata [ ' ReadingOrder ' ] )
else : readord = str ( arcdata [ ' ReadingOrder ' ] )
dfilename = str ( readord ) + " - " + ofilename
else :
dfilename = ofilename
grab_dst = os . path . join ( grdst , dfilename )
else :
grab_dst = os . path . join ( grdst , ofilename )
self . _log ( " Destination Path : " + grab_dst , logger . DEBUG )
logger . info ( " Destination Path : " + grab_dst )
grab_src = os . path . join ( self . nzb_folder , ofilename )
self . _log ( " Source Path : " + grab_src , logger . DEBUG )
logger . info ( " Source Path : " + grab_src )
logger . info ( " Moving " + str ( ofilename ) + " into directory : " + str ( grdst ) )
try :
shutil . move ( grab_src , grab_dst )
except ( OSError , IOError ) :
self . _log ( " Failed to move directory - check directories and manually re-run. " , logger . DEBUG )
logger . debug ( " Failed to move directory - check directories and manually re-run. " )
return
#tidyup old path
try :
shutil . rmtree ( self . nzb_folder )
except ( OSError , IOError ) :
self . _log ( " Failed to remove temporary directory. " , logger . DEBUG )
logger . debug ( " Failed to remove temporary directory - check directory and manually re-run. " )
return
logger . debug ( " Removed temporary directory : " + str ( self . nzb_folder ) )
self . _log ( " Removed temporary directory : " + self . nzb_folder , logger . DEBUG )
#delete entry from nzblog table
myDB . action ( ' DELETE from nzblog WHERE issueid=? ' , [ issueid ] )
if ' S ' in issueid :
issuearcid = re . sub ( ' S ' , ' ' , issueid )
logger . info ( " IssueArcID is : " + str ( issuearcid ) )
ctrlVal = { " IssueArcID " : issuearcid }
newVal = { " Status " : " Downloaded " ,
" Location " : grab_dst }
myDB . upsert ( " readinglist " , newVal , ctrlVal )
logger . info ( " updated status to Downloaded " )
return self . log
if self . nzb_name == ' Manual Run ' :
#loop through the hits here.
if len ( manual_list ) == ' 0 ' :
logger . info ( " No hits ... breakout. " )
return
2013-05-25 06:18:00 +00:00
2013-07-01 05:19:15 +00:00
for ml in manual_list :
comicid = ml [ ' ComicID ' ]
issueid = ml [ ' IssueID ' ]
issuenumOG = ml [ ' IssueNumber ' ]
self . Process_next ( comicid , issueid , issuenumOG , ml )
2013-08-17 17:54:41 +00:00
return
2013-07-01 05:19:15 +00:00
else :
2013-08-11 05:31:41 +00:00
comicid = issuenzb [ ' ComicID ' ]
issuenumOG = issuenzb [ ' Issue_Number ' ]
2013-07-01 05:19:15 +00:00
return self . Process_next ( comicid , issueid , issuenumOG )
2013-05-25 06:18:00 +00:00
2013-07-01 05:19:15 +00:00
def Process_next ( self , comicid , issueid , issuenumOG , ml = None ) :
annchk = " no "
extensions = ( ' .cbr ' , ' .cbz ' )
myDB = db . DBConnection ( )
comicnzb = myDB . action ( " SELECT * from comics WHERE comicid=? " , [ comicid ] ) . fetchone ( )
2013-07-30 04:57:37 +00:00
issuenzb = myDB . action ( " SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL " , [ issueid , comicid ] ) . fetchone ( )
2014-01-16 20:25:02 +00:00
logger . fdebug ( ' issueid: ' + str ( issueid ) )
logger . fdebug ( ' issuenumOG: ' + str ( issuenumOG ) )
2013-07-01 05:19:15 +00:00
if issuenzb is None :
2013-07-30 04:57:37 +00:00
issuenzb = myDB . action ( " SELECT * from annuals WHERE issueid=? and comicid=? " , [ issueid , comicid ] ) . fetchone ( )
2013-07-01 05:19:15 +00:00
annchk = " yes "
2013-01-03 10:01:07 +00:00
#issueno = str(issuenum).split('.')[0]
2013-03-29 04:02:35 +00:00
#new CV API - removed all decimals...here we go AGAIN!
2013-07-01 05:19:15 +00:00
issuenum = issuenzb [ ' Issue_Number ' ]
2013-04-02 08:56:24 +00:00
issue_except = ' None '
if ' au ' in issuenum . lower ( ) :
issuenum = re . sub ( " [^0-9] " , " " , issuenum )
issue_except = ' AU '
2013-07-01 05:19:15 +00:00
elif ' ai ' in issuenum . lower ( ) :
issuenum = re . sub ( " [^0-9] " , " " , issuenum )
issue_except = ' AI '
2013-03-29 04:02:35 +00:00
if ' . ' in issuenum :
iss_find = issuenum . find ( ' . ' )
iss_b4dec = issuenum [ : iss_find ]
iss_decval = issuenum [ iss_find + 1 : ]
if int ( iss_decval ) == 0 :
iss = iss_b4dec
issdec = int ( iss_decval )
issueno = str ( iss )
self . _log ( " Issue Number: " + str ( issueno ) , logger . DEBUG )
logger . fdebug ( " Issue Number: " + str ( issueno ) )
2013-01-03 10:01:07 +00:00
else :
2013-03-29 04:02:35 +00:00
if len ( iss_decval ) == 1 :
iss = iss_b4dec + " . " + iss_decval
issdec = int ( iss_decval ) * 10
else :
iss = iss_b4dec + " . " + iss_decval . rstrip ( ' 0 ' )
issdec = int ( iss_decval . rstrip ( ' 0 ' ) ) * 10
issueno = iss_b4dec
self . _log ( " Issue Number: " + str ( iss ) , logger . DEBUG )
logger . fdebug ( " Issue Number: " + str ( iss ) )
else :
iss = issuenum
issueno = str ( iss )
2012-12-27 15:04:03 +00:00
# issue zero-suppression here
if mylar . ZERO_LEVEL == " 0 " :
zeroadd = " "
else :
if mylar . ZERO_LEVEL_N == " none " : zeroadd = " "
elif mylar . ZERO_LEVEL_N == " 0x " : zeroadd = " 0 "
elif mylar . ZERO_LEVEL_N == " 00x " : zeroadd = " 00 "
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Zero Suppression set to : " + str ( mylar . ZERO_LEVEL_N ) )
2012-12-27 15:04:03 +00:00
if str ( len ( issueno ) ) > 1 :
if int ( issueno ) < 10 :
self . _log ( " issue detected less than 10 " , logger . DEBUG )
2013-03-29 04:02:35 +00:00
if ' . ' in iss :
if int ( iss_decval ) > 0 :
issueno = str ( iss )
prettycomiss = str ( zeroadd ) + str ( iss )
else :
prettycomiss = str ( zeroadd ) + str ( int ( issueno ) )
2013-01-03 10:01:07 +00:00
else :
2013-03-29 04:02:35 +00:00
prettycomiss = str ( zeroadd ) + str ( iss )
2013-04-02 08:56:24 +00:00
if issue_except != ' None ' :
prettycomiss = str ( prettycomiss ) + issue_except
2012-12-27 15:04:03 +00:00
self . _log ( " Zero level supplement set to " + str ( mylar . ZERO_LEVEL_N ) + " . Issue will be set as : " + str ( prettycomiss ) , logger . DEBUG )
elif int ( issueno ) > = 10 and int ( issueno ) < 100 :
self . _log ( " issue detected greater than 10, but less than 100 " , logger . DEBUG )
if mylar . ZERO_LEVEL_N == " none " :
zeroadd = " "
else :
zeroadd = " 0 "
2013-03-29 04:02:35 +00:00
if ' . ' in iss :
if int ( iss_decval ) > 0 :
issueno = str ( iss )
prettycomiss = str ( zeroadd ) + str ( iss )
else :
prettycomiss = str ( zeroadd ) + str ( int ( issueno ) )
2013-01-03 10:01:07 +00:00
else :
2013-03-29 04:02:35 +00:00
prettycomiss = str ( zeroadd ) + str ( iss )
2013-04-02 08:56:24 +00:00
if issue_except != ' None ' :
prettycomiss = str ( prettycomiss ) + issue_except
2012-12-27 15:04:03 +00:00
self . _log ( " Zero level supplement set to " + str ( mylar . ZERO_LEVEL_N ) + " .Issue will be set as : " + str ( prettycomiss ) , logger . DEBUG )
2012-10-30 10:43:01 +00:00
else :
2012-12-27 15:04:03 +00:00
self . _log ( " issue detected greater than 100 " , logger . DEBUG )
2013-03-29 04:02:35 +00:00
if ' . ' in iss :
if int ( iss_decval ) > 0 :
issueno = str ( iss )
2012-12-27 15:04:03 +00:00
prettycomiss = str ( issueno )
2013-04-02 08:56:24 +00:00
if issue_except != ' None ' :
prettycomiss = str ( prettycomiss ) + issue_except
2012-12-27 15:04:03 +00:00
self . _log ( " Zero level supplement set to " + str ( mylar . ZERO_LEVEL_N ) + " . Issue will be set as : " + str ( prettycomiss ) , logger . DEBUG )
2012-10-30 10:43:01 +00:00
else :
prettycomiss = str ( issueno )
2012-12-27 15:04:03 +00:00
self . _log ( " issue length error - cannot determine length. Defaulting to None: " + str ( prettycomiss ) , logger . DEBUG )
2013-07-01 05:19:15 +00:00
if annchk == " yes " :
2013-07-30 04:57:37 +00:00
self . _log ( " Annual detected. " )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Pretty Comic Issue is : " + str ( prettycomiss ) )
2012-12-27 15:04:03 +00:00
issueyear = issuenzb [ ' IssueDate ' ] [ : 4 ]
self . _log ( " Issue Year: " + str ( issueyear ) , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Issue Year : " + str ( issueyear ) )
2013-07-01 05:19:15 +00:00
# comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
2012-12-27 15:04:03 +00:00
publisher = comicnzb [ ' ComicPublisher ' ]
self . _log ( " Publisher: " + publisher , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Publisher: " + str ( publisher ) )
2013-03-08 01:49:47 +00:00
#we need to un-unicode this to make sure we can write the filenames properly for spec.chars
series = comicnzb [ ' ComicName ' ] . encode ( ' ascii ' , ' ignore ' ) . strip ( )
2012-12-27 15:04:03 +00:00
self . _log ( " Series: " + series , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Series: " + str ( series ) )
2012-12-27 15:04:03 +00:00
seriesyear = comicnzb [ ' ComicYear ' ]
self . _log ( " Year: " + seriesyear , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Year: " + str ( seriesyear ) )
2012-12-27 15:04:03 +00:00
comlocation = comicnzb [ ' ComicLocation ' ]
self . _log ( " Comic Location: " + comlocation , logger . DEBUG )
2013-01-22 16:20:47 +00:00
logger . fdebug ( " Comic Location: " + str ( comlocation ) )
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
comversion = comicnzb [ ' ComicVersion ' ]
self . _log ( " Comic Version: " + str ( comversion ) , logger . DEBUG )
logger . fdebug ( " Comic Version: " + str ( comversion ) )
if comversion is None :
comversion = ' None '
#if comversion is None, remove it so it doesn't populate with 'None'
if comversion == ' None ' :
chunk_f_f = re . sub ( ' \ $VolumeN ' , ' ' , mylar . FILE_FORMAT )
chunk_f = re . compile ( r ' \ s+ ' )
chunk_file_format = chunk_f . sub ( ' ' , chunk_f_f )
2013-04-07 06:34:37 +00:00
self . _log ( " No version # found for series - tag will not be available for renaming. " , logger . DEBUG )
logger . fdebug ( " No version # found for series, removing from filename " )
2013-04-07 06:54:13 +00:00
logger . fdebug ( " new format is now: " + str ( chunk_file_format ) )
FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
2013-04-06 09:43:18 +00:00
else :
chunk_file_format = mylar . FILE_FORMAT
2013-07-01 05:19:15 +00:00
2014-01-16 20:25:02 +00:00
if annchk == " no " :
chunk_f_f = re . sub ( ' \ $Annual ' , ' ' , chunk_file_format )
chunk_f = re . compile ( r ' \ s+ ' )
chunk_file_format = chunk_f . sub ( ' ' , chunk_f_f )
logger . fdebug ( ' not an annual - removing from filename paramaters ' )
logger . fdebug ( ' new format: ' + str ( chunk_file_format ) )
else :
logger . fdebug ( ' chunk_file_format is: ' + str ( chunk_file_format ) )
if ' $Annual ' not in chunk_file_format :
#if it's an annual, but $Annual isn't specified in file_format, we need to
#force it in there, by default in the format of $Annual $Issue
prettycomiss = " Annual " + str ( prettycomiss )
logger . fdebug ( ' prettycomiss: ' + str ( prettycomiss ) )
# 2013-07-01 05:19:15 +00:00
ofilename = None
# 2013-12-11 18:18:13 +00:00
#if meta-tagging is not enabled, we need to declare the check as being fail
#if meta-tagging is enabled, it gets changed just below to a default of pass
pcheck = " fail "
# 2013-07-01 05:19:15 +00:00
#tag the meta.
if mylar . ENABLE_META :
self . _log ( " Metatagging enabled - proceeding... " )
logger . fdebug ( " Metatagging enabled - proceeding... " )
# 2013-07-06 02:42:58 +00:00
pcheck = " pass "
try :
import cmtagmylar
if ml is None :
pcheck = cmtagmylar . run ( self . nzb_folder , issueid = issueid )
else :
pcheck = cmtagmylar . run ( self . nzb_folder , issueid = issueid , manual = " yes " , filename = ml [ ' ComicLocation ' ] )
except ImportError :
logger . fdebug ( " comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/ " )
logger . fdebug ( " continuing with PostProcessing, but I ' m not using metadata. " )
pcheck = " fail "
# 2013-07-01 05:19:15 +00:00
if pcheck == " fail " :
self . _log ( " Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging... " )
logger . fdebug ( " Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging... " )
# 2013-08-04 05:57:21 +00:00
elif pcheck == " unrar error " :
self . _log ( " This is a corrupt archive - whether CRC errors or it ' s incomplete. Marking as BAD, and retrying a different copy. " )
logger . error ( " This is a corrupt archive - whether CRC errors or it ' s incomplete. Marking as BAD, and retrying a different copy. " )
return self . log
# 2013-07-01 05:19:15 +00:00
else :
otofilename = pcheck
self . _log ( " Sucessfully wrote metadata to .cbz - Continuing.. " )
# 2013-07-10 01:45:10 +00:00
logger . fdebug ( " Sucessfully wrote metadata to .cbz ( " + str ( otofilename ) + " ) - Continuing.. " )
# 2013-01-13 17:10:41 +00:00
#Run Pre-script
if mylar . ENABLE_PRE_SCRIPTS :
nzbn = self . nzb_name #original nzb name
nzbf = self . nzb_folder #original nzb folder
#name, comicyear, comicid , issueid, issueyear, issue, publisher
#create the dic and send it.
seriesmeta = [ ]
seriesmetadata = { }
seriesmeta . append ( {
' name ' : series ,
' comicyear ' : seriesyear ,
' comicid ' : comicid ,
' issueid ' : issueid ,
' issueyear ' : issueyear ,
' issue ' : issuenum ,
' publisher ' : publisher
} )
seriesmetadata [ ' seriesmeta ' ] = seriesmeta
self . _run_pre_scripts ( nzbn , nzbf , seriesmetadata )
# 2012-10-30 10:43:01 +00:00
#rename file and move to new path
#nfilename = series + " " + issueno + " (" + seriesyear + ")"
# 2013-01-14 05:12:59 +00:00
# 2012-12-27 15:04:03 +00:00
file_values = { ' $Series ' : series ,
' $Issue ' : prettycomiss ,
# 2013-01-14 05:12:59 +00:00
' $Year ' : issueyear ,
' $series ' : series . lower ( ) ,
' $Publisher ' : publisher ,
' $publisher ' : publisher . lower ( ) ,
# FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
# 2013-04-06 09:43:18 +00:00
' $VolumeY ' : ' V ' + str ( seriesyear ) ,
# 2014-01-16 20:25:02 +00:00
' $VolumeN ' : comversion ,
' $Annual ' : ' Annual '
# 2012-12-27 15:04:03 +00:00
}
# 2013-05-15 09:04:26 +00:00
# 2013-07-01 05:19:15 +00:00
#if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
if ml is None :
for root , dirnames , filenames in os . walk ( self . nzb_folder ) :
for filename in filenames :
if filename . lower ( ) . endswith ( extensions ) :
ofilename = filename
path , ext = os . path . splitext ( ofilename )
else :
# 2013-12-11 18:18:13 +00:00
if pcheck == " fail " :
otofilename = ml [ ' ComicLocation ' ]
# 2014-01-16 20:25:02 +00:00
logger . fdebug ( ' otofilename: ' + str ( otofilename ) )
# 2013-07-01 05:19:15 +00:00
odir , ofilename = os . path . split ( otofilename )
# 2014-01-16 20:25:02 +00:00
logger . fdebug ( ' ofilename: ' + str ( ofilename ) )
# 2013-07-01 05:19:15 +00:00
path , ext = os . path . splitext ( ofilename )
# 2014-01-16 20:25:02 +00:00
logger . fdebug ( ' path: ' + str ( path ) )
logger . fdebug ( ' ext: ' + str ( ext ) )
# 2013-05-15 09:04:26 +00:00
if ofilename is None :
logger . error ( u " Aborting PostProcessing - the filename doesn ' t exist in the location given. Make sure that " + str ( self . nzb_folder ) + " exists and is the correct location. " )
return
# 2012-12-27 15:04:03 +00:00
self . _log ( " Original Filename: " + ofilename , logger . DEBUG )
self . _log ( " Original Extension: " + ext , logger . DEBUG )
# 2013-01-22 16:20:47 +00:00
logger . fdebug ( " Original Filname: " + str ( ofilename ) )
logger . fdebug ( " Original Extension: " + str ( ext ) )
# 2013-02-14 18:56:55 +00:00
if mylar . FILE_FORMAT == ' ' or not mylar . RENAME_FILES :
# 2012-12-27 15:04:03 +00:00
self . _log ( " Rename Files isn ' t enabled...keeping original filename. " , logger . DEBUG )
# 2013-01-22 16:20:47 +00:00
logger . fdebug ( " Rename Files isn ' t enabled - keeping original filename. " )
# 2012-12-27 15:04:03 +00:00
#check if extension is in nzb_name - will screw up otherwise
if ofilename . lower ( ) . endswith ( extensions ) :
nfilename = ofilename [ : - 4 ]
else :
nfilename = ofilename
# 2012-10-30 10:43:01 +00:00
else :
# FIX:(#304) Index out of range on recheck, FIX:(#303) Comicvine link updated on details page, FIX:(#302) Query rate (Search Delay) Added as a configuration option, FIX:(#300) Version number error on searching when no version number, FIX:(#297) Manual Rename of Files working, FIX:(#294) 'AU' issue problems should be resolved now, FIX:(#290) V#(year) and Vol#(year) added to filechecking, IMP: ComicVine data use only now as a hidden option (cv_only = 1 in config.ini), IMP: added as options for file naming, IMP: Rough drafting of Annuals (annuals_on = 1 in config.ini), Other fixes..
# 2013-04-06 09:43:18 +00:00
nfilename = helpers . replace_all ( chunk_file_format , file_values )
# 2012-12-27 15:04:03 +00:00
if mylar . REPLACE_SPACES :
#mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
nfilename = nfilename . replace ( ' ' , mylar . REPLACE_CHAR )
# 2013-04-12 02:14:27 +00:00
nfilename = re . sub ( ' [ \ , \ : \ ?] ' , ' ' , nfilename )
# 2014-01-16 20:25:02 +00:00
nfilename = re . sub ( ' [ \ /] ' , ' - ' , nfilename )
# 2012-12-27 15:04:03 +00:00
self . _log ( " New Filename: " + nfilename , logger . DEBUG )
# 2013-01-22 16:20:47 +00:00
logger . fdebug ( " New Filename: " + str ( nfilename ) )
# 2012-12-27 15:04:03 +00:00
# 2013-01-22 16:20:47 +00:00
src = os . path . join ( self . nzb_folder , ofilename )
# 2013-04-22 03:43:57 +00:00
filechecker . validateAndCreateDirectory ( comlocation , True )
# 2013-01-14 05:12:59 +00:00
if mylar . LOWERCASE_FILENAMES :
dst = ( comlocation + " / " + nfilename + ext ) . lower ( )
else :
dst = comlocation + " / " + nfilename + ext . lower ( )
# 2012-12-27 15:04:03 +00:00
self . _log ( " Source: " + src , logger . DEBUG )
self . _log ( " Destination: " + dst , logger . DEBUG )
# 2013-01-22 16:20:47 +00:00
logger . fdebug ( " Source: " + str ( src ) )
logger . fdebug ( " Destination: " + str ( dst ) )
# 2013-07-01 05:19:15 +00:00
if ml is None :
#non-manual run moving/deleting...
# 2014-01-16 20:25:02 +00:00
logger . fdebug ( ' self.nzb_folder: ' + self . nzb_folder )
logger . fdebug ( ' ofilename: ' + str ( ofilename ) )
logger . fdebug ( ' nfilename: ' + str ( nfilename + ext ) )
# 2013-07-01 05:19:15 +00:00
os . rename ( os . path . join ( self . nzb_folder , str ( ofilename ) ) , os . path . join ( self . nzb_folder , str ( nfilename + ext ) ) )
src = os . path . join ( self . nzb_folder , str ( nfilename + ext ) )
try :
shutil . move ( src , dst )
except ( OSError , IOError ) :
self . _log ( " Failed to move directory - check directories and manually re-run. " , logger . DEBUG )
self . _log ( " Post-Processing ABORTED. " , logger . DEBUG )
return
#tidyup old path
try :
shutil . rmtree ( self . nzb_folder )
except ( OSError , IOError ) :
self . _log ( " Failed to remove temporary directory - check directory and manually re-run. " , logger . DEBUG )
self . _log ( " Post-Processing ABORTED. " , logger . DEBUG )
return
self . _log ( " Removed temporary directory : " + str ( self . nzb_folder ) , logger . DEBUG )
else :
#Manual Run, this is the portion.
logger . fdebug ( " Renaming " + os . path . join ( self . nzb_folder , str ( ofilename ) ) + " ..to.. " + os . path . join ( self . nzb_folder , str ( nfilename + ext ) ) )
os . rename ( os . path . join ( self . nzb_folder , str ( ofilename ) ) , os . path . join ( self . nzb_folder , str ( nfilename + ext ) ) )
src = os . path . join ( self . nzb_folder , str ( nfilename + ext ) )
logger . fdebug ( " Moving " + src + " ... to ... " + dst )
try :
shutil . move ( src , dst )
except ( OSError , IOError ) :
# 2013-08-17 17:54:41 +00:00
logger . fdebug ( " Failed to move directory - check directories and manually re-run. " )
logger . fdebug ( " Post-Processing ABORTED. " )
# 2013-07-01 05:19:15 +00:00
return
# 2013-08-17 17:54:41 +00:00
logger . fdebug ( " Successfully moved to : " + dst )
# 2013-07-01 05:19:15 +00:00
#tidyup old path
# 2013-08-17 17:54:41 +00:00
#try:
# os.remove(os.path.join(self.nzb_folder, str(ofilename)))
# logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
#except (OSError, IOError):
# logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
# logger.fdebug("Post-Processing ABORTED.")
# return
#logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))
# 2012-12-27 15:04:03 +00:00
#delete entry from nzblog table
myDB . action ( ' DELETE from nzblog WHERE issueid=? ' , [ issueid ] )
# 2013-05-15 09:04:26 +00:00
#update snatched table to change status to Downloaded
# 2013-07-01 05:19:15 +00:00
if annchk == " no " :
updater . foundsearch ( comicid , issueid , down = ' True ' )
else :
updater . foundsearch ( comicid , issueid , mode = ' want_ann ' , down = ' True ' )
# 2012-12-27 15:04:03 +00:00
#force rescan of files
updater . forceRescan ( comicid )
# 2013-04-07 18:06:36 +00:00
logger . info ( u " Post-Processing completed for: " + series + " issue: " + str ( issuenumOG ) )
# 2012-12-27 15:04:03 +00:00
self . _log ( u " Post Processing SUCCESSFULL! " , logger . DEBUG )
# 2013-08-17 17:54:41 +00:00
if ml is not None :
return
else :
if mylar . PROWL_ENABLED :
pushmessage = series + ' ( ' + issueyear + ' ) - issue # ' + issuenumOG
logger . info ( u " Prowl request " )
prowl = notifiers . PROWL ( )
prowl . notify ( pushmessage , " Download and Postprocessing completed " )
if mylar . NMA_ENABLED :
nma = notifiers . NMA ( )
nma . notify ( series , str ( issueyear ) , str ( issuenumOG ) )
if mylar . PUSHOVER_ENABLED :
pushmessage = series + ' ( ' + str ( issueyear ) + ' ) - issue # ' + str ( issuenumOG )
logger . info ( u " Pushover request " )
pushover = notifiers . PUSHOVER ( )
pushover . notify ( pushmessage , " Download and Post-Processing completed " )
# 2013-09-07 06:05:32 +00:00
if mylar . BOXCAR_ENABLED :
boxcar = notifiers . BOXCAR ( )
boxcar . notify ( series , str ( issueyear ) , str ( issuenumOG ) )
# 2013-04-22 04:13:56 +00:00
# 2012-12-27 15:04:03 +00:00
# retrieve/create the corresponding comic objects
if mylar . ENABLE_EXTRA_SCRIPTS :
# 2013-01-07 18:26:15 +00:00
folderp = str ( dst ) #folder location after move/rename
nzbn = self . nzb_name #original nzb name
filen = str ( nfilename + ext ) #new filename
#name, comicyear, comicid , issueid, issueyear, issue, publisher
#create the dic and send it.
seriesmeta = [ ]
seriesmetadata = { }
seriesmeta . append ( {
' name ' : series ,
' comicyear ' : seriesyear ,
' comicid ' : comicid ,
' issueid ' : issueid ,
' issueyear ' : issueyear ,
' issue ' : issuenum ,
' publisher ' : publisher
} )
seriesmetadata [ ' seriesmeta ' ] = seriesmeta
self . _run_extra_scripts ( nzbname , self . nzb_folder , filen , folderp , seriesmetadata )
# 2012-12-27 15:04:03 +00:00
return self . log
# 2012-10-30 10:43:01 +00:00