FIX:(#1039)(#1036) Problems sending files to SABnzbd when Mylar & SABnzbd are on separate machines. Will now download the nzb, verify it, then send to SABnzbd via internal Mylar API, IMP: Separated the SABnzbd Download Directory option within the Configuration to indicate it is only required when Mylar and SABnzbd are on separate machines, FIX:(#1037) Unable to retrieve Download URL when using NZBMegaSearch as a provider, IMP:(#1033) Story Arc issues downloaded via torrents/nzbs/outside of Mylar can now be manually post-processed into correct Story Arcs, as well as being downloaded directly via SAB/NZBget, FIX: Retrying a Snatched Download from either the History or Series Detail page should now be working, FIX:(#1042) Changed script source to relative URL for jquery-1.9.1.js, IMP: Start of small code clean-up

This commit is contained in:
evilhero 2015-06-06 21:44:46 -04:00
parent 0c1e864099
commit d6009fa0a3
8 changed files with 591 additions and 189 deletions

View File

@ -108,7 +108,7 @@
</footer>
<a href="#main" id="toTop"><span>Back to top</span></a>
</div>
<script src="http://code.jquery.com/jquery-1.9.1.js"></script>
<script src="//code.jquery.com/jquery-1.9.1.js"></script>
<!--<script src="http://code.jquery.com/ui/1.10.3/jquery-ui.js"></script> -->
<script src="js/libs/jquery-1.7.2.min.js"></script>

View File

@ -285,11 +285,6 @@
<a href="#" style="float:right" type="button" id="find_sabapi" data-success="Sucessfully retrieved SABnzbd API" data-error="Error auto-populating SABnzbd API"><span class="ui-icon ui-icon-extlink"></span>Get API</a>
</div>
</div>
<div class="row">
<label>SABnzbd Download Directory</label>
<input type="text" name="sab_directory" value="${config['sab_directory']}" size="36" />
<small>Where your SAB downloads go... (optional)</small>
</div>
<div class="row">
<label>SABnzbd Category:</label>
@ -309,6 +304,19 @@
%endfor
</select>
</div>
<div class="row checkbox left clearfix">
<input id="sab_to_mylar" type="checkbox" onclick="initConfigCheckbox($(this));" name="sab_to_mylar" value="1" ${config['sab_to_mylar']} /><label>Are Mylar / SABnzbd on seperate machines</label>
<small class="heading"><span style="float: left; margin-right: .3em; margin-top: 4px;" class="ui-icon ui-icon-info"></span>This is *ONLY* required if Mylar and SABnzbd are on seperate machines, otherwise don't touch it</small>
</div>
<div class="config">
<div class="row">
<label>SABnzbd Download Directory</label>
<input type="text" name="sab_directory" value="${config['sab_directory']}" size="36" />
<small>Where your SAB downloads go (required for PP)</small>
</div>
</div>
<!--
<div class="row">
<a href="#" style="float:right" type="button" onclick="doAjaxCall('addAction();SABtest',$(this))" data-success="Sucessfully tested SABnzbd connection" data-error="Error testing SABnzbd connection"><span class="ui-icon ui-icon-extlink"></span>Test SABnzbd</a>
@ -1301,6 +1309,7 @@
initConfigCheckbox("#launch_browser");
initConfigCheckbox("#enable_https");
initConfigCheckbox("#enable_api");
initConfigCheckbox("#sab_to_mylar");
initConfigCheckbox("#usenewznab");
initConfigCheckbox("#usenzbsu");
initConfigCheckbox("#usedognzb");

View File

@ -232,8 +232,7 @@ class PostProcessor(object):
comicseries = myDB.select("SELECT * FROM comics")
manual_list = []
if comicseries is None:
logger.error(module + ' No Series in Watchlist - aborting Manual Post Processing. Maybe you should be running Import?')
return
logger.error(module + ' No Series in Watchlist - checking against Story Arcs (just in case). If I do not find anything, maybe you should be running Import?')
else:
watchvals = []
for wv in comicseries:
@ -248,7 +247,7 @@ class PostProcessor(object):
wv_publisher = wv['ComicPublisher']
wv_total = wv['Total']
#logger.fdebug('Checking ' + wv['ComicName'] + ' [' + str(wv['ComicYear']) + '] -- ' + str(wv['ComicID']))
logger.fdebug('Checking ' + wv['ComicName'] + ' [' + str(wv['ComicYear']) + '] -- ' + str(wv['ComicID']))
#force it to use the Publication Date of the latest issue instead of the Latest Date (which could be anything)
latestdate = myDB.select('SELECT IssueDate from issues WHERE ComicID=? order by ReleaseDate DESC', [wv['ComicID']])
@ -257,6 +256,23 @@ class PostProcessor(object):
else:
latestdate = wv['LatestDate']
logger.fdebug('Latest Date set to :' + str(latestdate))
if latestdate == '0000-00-00' or latestdate == 'None' or latestdate is None:
logger.fdebug('Forcing a refresh of series: ' + wv_comicname + ' as it appears to have incomplete issue dates.')
updater.dbUpdate([wv_comicid])
logger.fdebug('Refresh complete for ' + wv_comicname + '. Rechecking issue dates for completion.')
latestdate = myDB.select('SELECT IssueDate from issues WHERE ComicID=? order by ReleaseDate DESC', [wv['ComicID']])
if latestdate:
latestdate = latestdate[0][0]
else:
latestdate = wv['LatestDate']
logger.fdebug('Latest Date (after forced refresh) set to :' + str(latestdate))
if latestdate == '0000-00-00' or latestdate == 'None' or latestdate is None:
logger.fdebug('Unable to properly attain the Latest Date for series: ' + wv_comicname + '. Cannot check against this series for post-processing.')
continue
watchvals.append({"ComicName": wv_comicname,
"ComicPublisher": wv_comicpublisher,
"AlternateSearch": wv_alternatesearch,
@ -266,7 +282,8 @@ class PostProcessor(object):
"ComicVersion": wv_comicversion,
"Publisher": wv_publisher,
"Total": wv_total,
"ComicID": wv_comicid}
"ComicID": wv_comicid,
"IsArc": False}
})
ccnt=0
@ -297,7 +314,7 @@ class PostProcessor(object):
logger.fdebug(module + ' Annual detected.')
fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip())
annchk = "yes"
issuechk = myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
issuechk = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
else:
fcdigit = helpers.issuedigits(temploc)
issuechk = myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [cs['ComicID'], fcdigit]).fetchone()
@ -360,6 +377,239 @@ class PostProcessor(object):
fn+=1
logger.fdebug(module + ' There are ' + str(len(manual_list)) + ' files found that match on your watchlist, ' + str(nm) + ' do not match anything and will be ignored.')
#we should setup for manual post-processing of story-arc issues here
arc_series = myDB.select("SELECT * FROM readinglist order by ComicName") # by StoryArcID")
manual_arclist = []
if arc_series is None:
logger.error(module + ' No Story Arcs in Watchlist - aborting Manual Post Processing. Maybe you should be running Import?')
return
else:
arcvals = []
for av in arc_series:
arcvals.append({"ComicName": av['ComicName'],
"ArcValues": {"StoryArc": av['StoryArc'],
"StoryArcID": av['StoryArcID'],
"IssueArcID": av['IssueArcID'],
"ComicName": av['ComicName'],
"ComicPublisher": av['IssuePublisher'],
"IssueID": av['IssueID'],
"IssueNumber": av['IssueNumber'],
"IssueYear": av['IssueYear'], #for some reason this is empty
"ReadingOrder": av['ReadingOrder'],
"IssueDate": av['IssueDate'],
"Status": av['Status'],
"Location": av['Location']},
"WatchValues": {"SeriesYear": av['SeriesYear'],
"LatestDate": av['IssueDate'],
"ComicVersion": 'v' + str(av['SeriesYear']),
"Publisher": av['IssuePublisher'],
"Total": av['TotalIssues'], # this will return the total issues in the arc (not needed for this)
"ComicID": av['ComicID'],
"IsArc": True}
})
ccnt=0
nm=0
from collections import defaultdict
res = defaultdict(list)
for acv in arcvals:
res[acv['ComicName']].append({"ArcValues": acv['ArcValues'],
"WatchValues": acv['WatchValues']})
for k,v in res.items():
i = 0
while i < len(v):
#k is ComicName
#v is ArcValues and WatchValues
if k is None or k == 'None':
pass
else:
arcmatch = filechecker.listFiles(self.nzb_folder, k, v[i]['ArcValues']['ComicPublisher'], manual=v[i]['WatchValues'])
if arcmatch['comiccount'] == 0:
pass
else:
fn = 0
fccnt = int(arcmatch['comiccount'])
if len(arcmatch) == 1: break
while (fn < fccnt):
try:
tmpfc = arcmatch['comiclist'][fn]
except IndexError, KeyError:
break
temploc= tmpfc['JusttheDigits'].replace('_', ' ')
temploc = re.sub('[\#\']', '', temploc)
if 'annual' in temploc.lower():
biannchk = re.sub('-', '', temploc.lower()).strip()
if 'biannual' in biannchk:
logger.fdebug(module + ' Bi-Annual detected.')
fcdigit = helpers.issuedigits(re.sub('biannual', '', str(biannchk)).strip())
else:
logger.fdebug(module + ' Annual detected.')
fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip())
annchk = "yes"
issuechk = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [v[i]['WatchValues']['ComicID'], fcdigit]).fetchone()
else:
logger.info('Issue Number :' + str(temploc))
fcdigit = helpers.issuedigits(temploc)
issuechk = myDB.selectone("SELECT * from readinglist WHERE ComicID=? AND Int_IssueNumber=?", [v[i]['WatchValues']['ComicID'], fcdigit]).fetchone()
if issuechk is None:
logger.fdebug(module + ' No corresponding issue # found for ' + str(v[i]['WatchValues']['ComicID']))
else:
datematch = "True"
if len(arcmatch) >= 1 and tmpfc['ComicYear'] is not None:
#if the # of matches is more than 1, we need to make sure we get the right series
#compare the ReleaseDate for the issue, to the found issue date in the filename.
#if ReleaseDate doesn't exist, use IssueDate
#if no issue date was found, then ignore.
issyr = None
logger.fdebug('issuedate:' + str(issuechk['IssueDate']))
logger.fdebug('issuechk: ' + str(issuechk['IssueDate'][5:7]))
logger.info('ReleaseDate: ' + str(issuechk['StoreDate']))
logger.info('IssueDate: ' + str(issuechk['IssueDate']))
if issuechk['StoreDate'] is not None and issuechk['StoreDate'] != '0000-00-00':
monthval = issuechk['StoreDate']
if int(issuechk['StoreDate'][:4]) < int(tmpfc['ComicYear']):
logger.fdebug(module + ' ' + str(issuechk['StoreDate']) + ' is before the issue year of ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
datematch = "False"
else:
monthval = issuechk['IssueDate']
if int(issuechk['IssueDate'][:4]) < int(tmpfc['ComicYear']):
logger.fdebug(module + ' ' + str(issuechk['IssueDate']) + ' is before the issue year ' + str(tmpfc['ComicYear']) + ' that was discovered in the filename')
datematch = "False"
if int(monthval[5:7]) == 11 or int(monthval[5:7]) == 12:
issyr = int(monthval[:4]) + 1
logger.fdebug(module + ' IssueYear (issyr) is ' + str(issyr))
elif int(monthval[5:7]) == 1 or int(monthval[5:7]) == 2 or int(monthval[5:7]) == 3:
issyr = int(monthval[:4]) - 1
if datematch == "False" and issyr is not None:
logger.fdebug(module + ' ' + str(issyr) + ' comparing to ' + str(tmpfc['ComicYear']) + ' : rechecking by month-check versus year.')
datematch = "True"
if int(issyr) != int(tmpfc['ComicYear']):
logger.fdebug(module + '[.:FAIL:.] Issue is before the modified issue year of ' + str(issyr))
datematch = "False"
else:
logger.info(module + ' Found matching issue # ' + str(fcdigit) + ' for ComicID: ' + str(v[i]['WatchValues']['ComicID']) + ' / IssueID: ' + str(issuechk['IssueID']))
if datematch == "True" and helpers.issuedigits(temploc) == helpers.issuedigits(v[i]['ArcValues']['IssueNumber']):
passit = False
if len(manual_list) > 0:
if any([ v[i]['ArcValues']['IssueID'] == x['IssueID'] for x in manual_list ]):
logger.info('[STORY-ARC POST-PROCESSING] IssueID ' + str(v[i]['ArcValues']['IssueID']) + ' exists in your watchlist. Bypassing Story-Arc post-processing performed later.')
#add in the storyarcid into the manual list so it will perform story-arc functions after normal manual PP is finished.
for a in manual_list:
if a['IssueID'] == v[i]['ArcValues']['IssueID']:
a['IssueArcID'] = v[i]['ArcValues']['IssueArcID']
break
passit = True
if passit == False:
logger.info('[' + k + ' #' + str(issuechk['IssueNumber']) + '] MATCH: ' + tmpfc['ComicLocation'] + ' / ' + str(issuechk['IssueID']) + ' / ' + str(v[i]['ArcValues']['IssueID']))
manual_arclist.append({"ComicLocation": tmpfc['ComicLocation'],
"ComicID": v[i]['WatchValues']['ComicID'],
"IssueID": v[i]['ArcValues']['IssueID'],
"IssueNumber": v[i]['ArcValues']['IssueNumber'],
"StoryArc": v[i]['ArcValues']['StoryArc'],
"IssueArcID": v[i]['ArcValues']['IssueArcID'],
"ReadingOrder": v[i]['ArcValues']['ReadingOrder'],
"ComicName": k})
else:
logger.fdebug(module + ' Incorrect series - not populating..continuing post-processing')
fn+=1
i+=1
if len(manual_arclist) > 0:
logger.info('[STORY-ARC MANUAL POST-PROCESSING] I have found ' + str(len(manual_arclist)) + ' issues that belong to Story Arcs. Flinging them into the correct directories.')
for ml in manual_arclist:
issueid = ml['IssueID']
ofilename = ml['ComicLocation']
logger.info('[STORY-ARC POST-PROCESSING] Enabled for ' + ml['StoryArc'])
arcdir = helpers.filesafe(ml['StoryArc'])
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", arcdir)
logger.fdebug(module + ' Story Arc Directory set to : ' + storyarcd)
grdst = storyarcd
else:
logger.fdebug(module + ' Story Arc Directory set to : ' + mylar.GRABBAG_DIR)
storyarcd = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
grdst = mylar.DESTINATION_DIR
#tag the meta.
if mylar.ENABLE_META:
logger.info('[STORY-ARC POST-PROCESSING] Metatagging enabled - proceeding...')
try:
import cmtagmylar
metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=ofilename)
except ImportError:
logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/')
metaresponse = "fail"
if metaresponse == "fail":
logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file.')
elif metaresponse == "unrar error":
logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.')
#launch failed download handling here.
else:
ofilename = os.path.split(metaresponse)[1]
logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..')
self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...')
filechecker.validateAndCreateDirectory(grdst, True, module=module)
#if from a StoryArc, check to see if we're appending the ReadingOrder to the filename
if mylar.READ2FILENAME:
logger.fdebug(module + ' readingorder#: ' + str(ml['ReadingOrder']))
if int(ml['ReadingOrder']) < 10: readord = "00" + str(ml['ReadingOrder'])
elif int(ml['ReadingOrder']) > 10 and int(ml['ReadingOrder']) < 99: readord = "0" + str(ml['ReadingOrder'])
else: readord = str(ml['ReadingOrder'])
dfilename = str(readord) + "-" + ofilename
else:
dfilename = ofilename
grab_dst = os.path.join(grdst, dfilename)
logger.fdebug(module + ' Destination Path : ' + grab_dst)
grab_src = os.path.join(self.nzb_folder, ofilename)
logger.fdebug(module + ' Source Path : ' + grab_src)
logger.info(module + ' Moving ' + str(ofilename) + ' into directory : ' + str(grab_dst))
try:
shutil.move(grab_src, grab_dst)
except (OSError, IOError):
logger.warn(module + ' Failed to move directory - check directories and manually re-run.')
return
#tidyup old path
try:
pass
#shutil.rmtree(self.nzb_folder)
except (OSError, IOError):
logger.warn(module + ' Failed to remove temporary directory - check directory and manually re-run.')
return
logger.fdebug(module + ' Removed temporary directory : ' + str(self.nzb_folder))
#delete entry from nzblog table
IssArcID = 'S' + str(ml['IssueArcID'])
myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', [IssArcID,ml['StoryArc']])
logger.fdebug(module + ' IssueArcID: ' + str(ml['IssueArcID']))
ctrlVal = {"IssueArcID": ml['IssueArcID']}
newVal = {"Status": "Downloaded",
"Location": grab_dst}
logger.fdebug('writing: ' + str(newVal) + ' -- ' + str(ctrlVal))
myDB.upsert("readinglist", newVal, ctrlVal)
logger.fdebug(module + ' [' + ml['StoryArc'] + '] Post-Processing completed for: ' + grab_dst)
else:
nzbname = self.nzb_name
@ -602,11 +852,15 @@ class PostProcessor(object):
#check to see if file is still being written to.
while True:
waiting = False
ctime = max(os.path.getctime(ml['ComicLocation']), os.path.getmtime(ml['ComicLocation']))
if time.time() > ctime > time.time() - 15:
time.sleep(max(time.time() - ctime, 0))
waiting = True
else:
try:
ctime = max(os.path.getctime(ml['ComicLocation']), os.path.getmtime(ml['ComicLocation']))
if time.time() > ctime > time.time() - 15:
time.sleep(max(time.time() - ctime, 0))
waiting = True
else:
break
except:
#file is no longer present in location / can't be accessed.
break
dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid)
@ -1073,10 +1327,11 @@ class PostProcessor(object):
except OSError:
logger.error(module + ' Failed to change file permissions. Ensure that the user running Mylar has proper permissions to change permissions in : ' + dst)
logger.fdebug(module + ' Continuing post-processing but unable to change file permissions in ' + dst)
#delete entry from nzblog table
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
#update snatched table to change status to Downloaded
#delete entry from nzblog table
myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
#update snatched table to change status to Downloaded
if annchk == "no":
updater.foundsearch(comicid, issueid, down=downtype, module=module)
dispiss = 'issue: ' + issuenumOG
@ -1090,6 +1345,65 @@ class PostProcessor(object):
#force rescan of files
updater.forceRescan(comicid, module=module)
try:
if ml['IssueArcID']:
logger.info('Watchlist Story Arc match detected.')
arcinfo = myDB.selectone('SELECT * FROM readinglist where IssueArcID=?', [ml['IssueArcID']]).fetchone()
if arcinfo is None:
logger.warn('Unable to locate IssueID within givin Story Arc. Ensure everything is up-to-date (refreshed) for the Arc.')
else:
arcdir = helpers.filesafe(arcinfo['StoryArc'])
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", arcdir)
logger.fdebug(module + ' Story Arc Directory set to : ' + storyarcd)
grdst = storyarcd
else:
logger.fdebug(module + ' Story Arc Directory set to : ' + mylar.GRABBAG_DIR)
storyarcd = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
grdst = mylar.DESTINATION_DIR
filechecker.validateAndCreateDirectory(grdst, True, module=module)
if mylar.READ2FILENAME:
logger.fdebug(module + ' readingorder#: ' + str(arcinfo['ReadingOrder']))
if int(arcinfo['ReadingOrder']) < 10: readord = "00" + str(arcinfo['ReadingOrder'])
elif int(arcinfo['ReadingOrder']) > 10 and int(arcinfo['ReadingOrder']) < 99: readord = "0" + str(arcinfo['ReadingOrder'])
else: readord = str(arcinfo['ReadingOrder'])
dfilename = str(readord) + "-" + os.path.split(dst)[1]
else:
dfilename = os.path.split(dst)[1]
grab_dst = os.path.join(grdst, dfilename)
logger.fdebug(module + ' Destination Path : ' + grab_dst)
grab_src = dst
logger.fdebug(module + ' Source Path : ' + grab_src)
logger.info(module + ' Copying ' + str(dst) + ' into directory : ' + str(grab_dst))
try:
shutil.copy(grab_src, grab_dst)
except (OSError, IOError):
logger.warn(module + ' Failed to move directory - check directories and manually re-run.')
return
#delete entry from nzblog table in case it was forced via the Story Arc Page
IssArcID = 'S' + str(ml['IssueArcID'])
myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', [IssArcID,arcinfo['StoryArc']])
logger.fdebug(module + ' IssueArcID: ' + str(ml['IssueArcID']))
ctrlVal = {"IssueArcID": ml['IssueArcID']}
newVal = {"Status": "Downloaded",
"Location": grab_dst}
logger.fdebug('writing: ' + str(newVal) + ' -- ' + str(ctrlVal))
myDB.upsert("readinglist", newVal, ctrlVal)
logger.fdebug(module + ' [' + arcinfo['StoryArc'] + '] Post-Processing completed for: ' + grab_dst)
except:
pass
if mylar.WEEKFOLDER:
#if enabled, will *copy* the post-processed file to the weeklypull list folder for the given week.
weeklypull.weekly_singlecopy(comicid, issuenum, str(nfilename +ext), dst, module=module, issueid=issueid)

View File

@ -115,6 +115,7 @@ HTTPS_KEY = None
HTTPS_FORCE_ON = False
API_ENABLED = False
API_KEY = None
DOWNLOAD_APIKEY = None
LAUNCH_BROWSER = False
LOGVERBOSE = None
GIT_PATH = None
@ -214,6 +215,7 @@ SAB_PASSWORD = None
SAB_APIKEY = None
SAB_CATEGORY = None
SAB_PRIORITY = None
SAB_TO_MYLAR = False
SAB_DIRECTORY = None
USE_NZBGET = False
@ -394,10 +396,10 @@ def initialize():
with INIT_LOCK:
global __INITIALIZED__, DBCHOICE, DBUSER, DBPASS, DBNAME, COMICVINE_API, DEFAULT_CVAPI, CVAPI_COUNT, CVAPI_TIME, CVAPI_MAX, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, UPCOMING_SNATCHED, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, MAX_LOGSIZE, LOGVERBOSE, OLDCONFIG_VERSION, OS_DETECT, \
queue, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_FORCE_ON, API_ENABLED, API_KEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, AUTO_UPDATE, \
queue, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, HTTPS_FORCE_ON, API_ENABLED, API_KEY, DOWNLOAD_APIKEY, LAUNCH_BROWSER, GIT_PATH, SAFESTART, AUTO_UPDATE, \
CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, DESTINATION_DIR, MULTIPLE_DEST_DIRS, CREATE_FOLDERS, \
DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, DUPECONSTRAINT, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, NZB_DOWNLOADER, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_TO_MYLAR, SAB_DIRECTORY, USE_BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBGET_DIRECTORY, NZBSU, NZBSU_UID, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, \
NEWZNAB, NEWZNAB_NAME, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_UID, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, ALTEXPERIMENTAL, \
@ -670,6 +672,7 @@ def initialize():
SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', '')
SAB_TO_MYLAR = bool(check_setting_int(CFG, 'SABnzbd', 'sab_to_mylar', 0))
SAB_DIRECTORY = check_setting_str(CFG, 'SABnzbd', 'sab_directory', '')
SAB_PRIORITY = check_setting_str(CFG, 'SABnzbd', 'sab_priority', '')
if SAB_PRIORITY.isdigit():
@ -1299,6 +1302,7 @@ def config_write():
new_config['SABnzbd']['sab_apikey'] = SAB_APIKEY
new_config['SABnzbd']['sab_category'] = SAB_CATEGORY
new_config['SABnzbd']['sab_priority'] = SAB_PRIORITY
new_config['SABnzbd']['sab_to_mylar'] = int(SAB_TO_MYLAR)
new_config['SABnzbd']['sab_directory'] = SAB_DIRECTORY
new_config['NZBGet'] = {}
@ -1459,7 +1463,7 @@ def dbcheck():
# c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS importresults (impID TEXT, ComicName TEXT, ComicYear TEXT, Status TEXT, ImportDate TEXT, ComicFilename TEXT, ComicLocation TEXT, WatchMatch TEXT, DisplayName TEXT, SRID TEXT, ComicID TEXT, IssueID TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readlist (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Status TEXT, DateAdded TEXT, Location TEXT, inCacheDir TEXT, SeriesYear TEXT, ComicID TEXT, StatusChange TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, StoreDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT)')
c.execute('CREATE TABLE IF NOT EXISTS annuals (IssueID TEXT, Issue_Number TEXT, IssueName TEXT, IssueDate TEXT, Status TEXT, ComicID TEXT, GCDComicID TEXT, Location TEXT, ComicSize TEXT, Int_IssueNumber INT, ComicName TEXT, ReleaseDate TEXT, ReleaseComicID TEXT, ReleaseComicName TEXT, IssueDate_Edit TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS rssdb (Title TEXT UNIQUE, Link TEXT, Pubdate TEXT, Site TEXT, Size TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS futureupcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Publisher TEXT, Status TEXT, DisplayComicName TEXT)')
@ -1798,6 +1802,11 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN CV_ArcID TEXT')
try:
c.execute('SELECT Int_IssueNumber from readinglist')
except sqlite3.OperationalError:
c.execute('ALTER TABLE readinglist ADD COLUMN Int_IssueNumber INT')
## -- searchresults Table --
try:
c.execute('SELECT SRID from searchresults')

View File

@ -30,7 +30,7 @@ from cherrypy.lib.static import serve_file, serve_download
cmd_list = ['getIndex', 'getComic', 'getUpcoming', 'getWanted', 'getHistory', 'getLogs',
'findComic', 'addComic', 'delComic', 'pauseComic', 'resumeComic', 'refreshComic',
'addIssue', 'queueIssue', 'unqueueIssue', 'forceSearch', 'forceProcess', 'getVersion', 'checkGithub',
'shutdown', 'restart', 'update', 'getComicInfo', 'getIssueInfo', 'getArt', 'downloadIssue']
'shutdown', 'restart', 'update', 'getComicInfo', 'getIssueInfo', 'getArt', 'downloadIssue', 'downloadNZB']
class Api(object):
@ -48,28 +48,30 @@ class Api(object):
def checkParams(self, *args, **kwargs):
if not mylar.API_ENABLED:
self.data = 'API not enabled'
return
if not mylar.API_KEY:
self.data = 'API key not generated'
return
if len(mylar.API_KEY) != 32:
self.data = 'API key not generated correctly'
return
if 'apikey' not in kwargs:
self.data = 'Missing api key'
return
if kwargs['apikey'] != mylar.API_KEY:
if 'cmd' not in kwargs:
self.data = 'Missing parameter: cmd'
return
if not mylar.API_ENABLED:
if kwargs['apikey'] != mylar.DOWNLOAD_APIKEY:
self.data = 'API not enabled'
return
if kwargs['apikey'] != mylar.API_KEY and kwargs['apikey'] != mylar.DOWNLOAD_APIKEY:
self.data = 'Incorrect API key'
return
else:
self.apikey = kwargs.pop('apikey')
if 'cmd' not in kwargs:
self.data = 'Missing parameter: cmd'
if not mylar.API_KEY:
self.data = 'API key not generated'
return
if len(mylar.API_KEY) != 32:
self.data = 'API key not generated correctly'
return
if kwargs['cmd'] not in cmd_list:
@ -84,7 +86,7 @@ class Api(object):
def fetchData(self):
if self.data == 'OK':
logger.info('Recieved API command: ' + self.cmd)
logger.fdebug('Recieved API command: ' + self.cmd)
methodToCall = getattr(self, "_" + self.cmd)
result = methodToCall(**self.kwargs)
if 'callback' not in self.kwargs:
@ -421,3 +423,18 @@ class Api(object):
else:
self.data = 'You need to download that issue first'
return
def _downloadNZB(self, nzbname):
if not nzbname:
self.data = 'You need to provide a nzbname'
return
self.nzbname = nzbname
f = os.path.join(mylar.CACHE_DIR, nzbname)
if os.path.isfile(f):
self.file = f
self.filename = nzbname
else:
self.data = 'NZBname does not exist within the cache directory. Unable to retrieve.'
return

View File

@ -961,15 +961,16 @@ def listFiles(dir, watchcomic, Publisher, AlternateSearch=None, manual=None, sar
if subnm[cnt] == ' ':
pass
else:
logger.fdebug('[FILECHECKER] ' + str(cnt) + ' Bracket Word: ' + subnm[cnt])
strip_sub = subnm[cnt].strip()
logger.fdebug('[FILECHECKER] ' + str(cnt) + ' Bracket Word: ' + strip_sub + '/' + str(len(strip_sub)))
#if ComVersChk == 0:
# logger.fdebug('[FILECHECKER] Series version detected as V1 (only series in existance with that title). Bypassing year check')
# yearmatch = "true"
# break
if (subnm[cnt].startswith('19') or subnm[cnt].startswith('20')) and len(subnm[cnt]) == 4:
logger.fdebug('[FILECHECKER] year detected: ' + subnm[cnt])
result_comyear = subnm[cnt]
if any([strip_sub.startswith('19'), strip_sub.startswith('20')]) and len(strip_sub) == 4:
logger.fdebug('[FILECHECKER] year detected: ' + strip_sub)
result_comyear = strip_sub
##### - checking to see what removing this does for the masses
if int(result_comyear) <= int(maxyear) and int(result_comyear) >= int(comyear):
logger.fdebug('[FILECHECKER] ' + str(result_comyear) + ' is within the series range of ' + str(comyear) + '-' + str(maxyear))

View File

@ -47,10 +47,13 @@ def search_init(ComicName, IssueNumber, ComicYear, SeriesYear, Publisher, IssueD
unaltered_ComicName = ComicName
#ComicName = filesafe
#logger.info('AlternateSearch is : ' + AlternateSearch)
if ComicYear == None: ComicYear = '2014'
else: ComicYear = str(ComicYear)[:4]
if ComicYear == None:
ComicYear = str(datetime.datetime.now().year)
else:
ComicYear = str(ComicYear)[:4]
if Publisher:
if Publisher == 'IDW Publishing': Publisher = 'IDW'
if Publisher == 'IDW Publishing':
Publisher = 'IDW'
logger.fdebug('Publisher is : ' + Publisher)
issuetitle = helpers.get_issue_title(IssueID)
if issuetitle:
@ -1673,7 +1676,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
nzo_info = {}
filen = None
nzbmega = False
payload = None
headers = {'User-Agent': str(mylar.USER_AGENT)}
#link doesn't have the apikey - add it and use ?t=get for newznab based.
@ -1689,17 +1692,25 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
else:
host_newznab_fix = host_newznab
apikey = newznab[2].rstrip()
down_url = host_newznab_fix + 'api'
verify = False
if 'warp?x=' in link:
logger.fdebug('NZBMegaSearch url detected. Adjusting...')
nzbmega = True
else:
apikey = newznab[2].rstrip()
down_url = host_newznab_fix + 'api'
verify = False
else:
down_url = 'https://api.nzb.su/api?'
apikey = mylar.NZBSU_APIKEY
verify = True #unsure if verify should be set to True for nzb.su or not.
payload = {'t': 'get',
'id': str(nzbid),
'apikey': str(apikey)}
if nzbmega == True:
down_url = link
verify = False
else:
payload = {'t': 'get',
'id': str(nzbid),
'apikey': str(apikey)}
logger.fdebug('payload:' + str(payload))
@ -1763,7 +1774,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if payload is None:
logger.error('Unable to download nzb from link: ' + str(down_url) + ' [' + link + ']')
else:
errorlink = down_url + urllib.urlencode(payload)
errorlink = down_url + '?' + urllib.urlencode(payload)
logger.error('Unable to download nzb from link: ' + str(errorlink) + ' [' + link + ']')
else:
#convert to a generic type of format to help with post-processing.
@ -1784,17 +1795,16 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
alt_nzbname = re.sub('[\s\_]', '.', alt_nzbname)
logger.info('filen: ' + alt_nzbname + ' -- nzbname: ' + nzbname + ' are not identical. Storing extra value as : ' + alt_nzbname)
#make sure the cache directory exists - if not, create it.
tmppath = mylar.CACHE_DIR
if os.path.exists(tmppath):
logger.fdebug("cache directory successfully found at : " + str(tmppath))
#make sure the cache directory exists - if not, create it (used for storing nzbs).
if os.path.exists(mylar.CACHE_DIR):
logger.fdebug("Cache Directory successfully found at : " + mylar.CACHE_DIR)
pass
else:
#let's make the dir.
logger.fdebug("couldn't locate cache directory, attempting to create at : " + str(mylar.CACHE_DIR))
logger.fdebug("Could not locate Cache Directory, attempting to create at : " + mylar.CACHE_DIR)
try:
os.makedirs(str(mylar.CACHE_DIR))
logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))
os.makedirs(mylar.CACHE_DIR)
logger.info("Temporary NZB Download Directory successfully created at: " + mylar.CACHE_DIR)
except OSError.e:
if e.errno != errno.EEXIST:
raise
@ -1844,14 +1854,6 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
if mylar.USE_BLACKHOLE and nzbprov != '32P' and nzbprov != 'KAT':
logger.fdebug("using blackhole directory at : " + str(mylar.BLACKHOLE_DIR))
if os.path.exists(mylar.BLACKHOLE_DIR):
# Add a user-agent
#request = urllib2.Request(linkapi) #(str(mylar.BLACKHOLE_DIR) + str(filenamenzb))
#request.add_header('User-Agent', str(mylar.USER_AGENT))
#try:
# opener = helpers.urlretrieve(urllib2.urlopen(request), str(mylar.BLACKHOLE_DIR) + str(nzbname) + '.nzb')
#except Exception, e:
# logger.warn('Error fetching data from %s: %s' % (nzbprov, e))
# return "blackhole-fail"
#copy the nzb from nzbpath to blackhole dir.
try:
shutil.move(nzbpath, os.path.join(mylar.BLACKHOLE_DIR, nzbname))
@ -1930,13 +1932,36 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
logger.fdebug("send-to-SAB host &api initiation string : " + str(helpers.apiremove(tmpapi, '&')))
SABtype = "&mode=addlocalfile&name="
SABtype = "&mode=addurl&name="
#generate the api key to download here and then kill it immediately after.
if mylar.DOWNLOAD_APIKEY is None:
import hashlib, random
mylar.DOWNLOAD_APIKEY = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
if mylar.ENABLE_HTTPS:
proto = 'https://'
else:
proto = 'http://'
if mylar.HTTP_ROOT is None:
hroot = '/'
elif mylar.HTTP_ROOT.endswith('/'):
hroot = mylar.HTTP_ROOT
else:
if mylar.HTTP_ROOT != '/':
hroot = mylar.HTTP_ROOT + '/'
else:
hroot = mylar.HTTP_ROOT
fileURL = proto + str(mylar.HTTP_HOST) + ':' + str(mylar.HTTP_PORT) + hroot + 'api?apikey=' + mylar.DOWNLOAD_APIKEY + '&cmd=downloadNZB&nzbname=' + nzbname
tmpapi = tmpapi + SABtype
logger.fdebug("...selecting API type: " + str(tmpapi))
tmpapi = tmpapi + urllib.quote_plus(nzbpath)
logger.fdebug("...attaching nzb provider link: " + str(helpers.apiremove(tmpapi, '$')))
tmpapi = tmpapi + urllib.quote_plus(fileURL)
logger.fdebug("...attaching nzb via internal Mylar API: " + str(helpers.apiremove(tmpapi, '$')))
# determine SAB priority
if mylar.SAB_PRIORITY:
tmpapi = tmpapi + "&priority=" + sabpriority
@ -1969,6 +1994,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc
requests.put(tmpapi, verify=False)
except:
logger.error('Unable to send nzb file to SABnzbd')
mylar.DOWNLOAD_APIKEY = None
return "sab-fail"
# this works for non-http sends to sab (when both sab AND provider are non-https)
@ -2205,7 +2231,9 @@ def generate_id(nzbprov, link):
elif nzbprov == 'newznab':
#if in format of http://newznab/getnzb/<id>.nzb&i=1&r=apikey
tmpid = urlparse.urlparse(link)[4] #param 4 is the query string from the url.
if tmpid == '' or tmpid is None:
if 'warp' in urlparse.urlparse(link)[2] and 'x=' in tmpid:
nzbid = os.path.splitext(link)[0].rsplit('x=',1)[1]
elif tmpid == '' or tmpid is None:
nzbid = os.path.splitext(link)[0].rsplit('/', 1)[1]
else:
# for the geek in all of us...

View File

@ -416,102 +416,105 @@ class WebInterface(object):
issnum = arcval['Issue_Number']
issdate = str(arcval['Issue_Date'])
storedate = str(arcval['Store_Date'])
if issnum.isdigit():
int_issnum = int(issnum) * 1000
else:
if 'a.i.' in issnum.lower() or 'ai' in issnum.lower():
issnum = re.sub('\.', '', issnum)
#int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('i')
if 'au' in issnum.lower():
int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('u')
elif 'inh' in issnum.lower():
int_issnum = (int(issnum[:-4]) * 1000) + ord('i') + ord('n') + ord('h')
elif 'now' in issnum.lower():
int_issnum = (int(issnum[:-4]) * 1000) + ord('n') + ord('o') + ord('w')
elif u'\xbd' in issnum:
int_issnum = .5 * 1000
logger.fdebug(module + ' 1/2 issue detected :' + issnum + ' === ' + str(int_issnum))
elif u'\xbc' in issnum:
int_issnum = .25 * 1000
elif u'\xbe' in issnum:
int_issnum = .75 * 1000
elif u'\u221e' in issnum:
#issnum = utf-8 will encode the infinity symbol without any help
int_issnum = 9999999999 * 1000 # set 9999999999 for integer value of issue
elif '.' in issnum or ',' in issnum:
if ',' in issnum: issnum = re.sub(',', '.', issnum)
issst = str(issnum).find('.')
#logger.fdebug("issst:" + str(issst))
if issst == 0:
issb4dec = 0
else:
issb4dec = str(issnum)[:issst]
#logger.fdebug("issb4dec:" + str(issb4dec))
#if the length of decimal is only 1 digit, assume it's a tenth
decis = str(issnum)[issst +1:]
#logger.fdebug("decis:" + str(decis))
if len(decis) == 1:
decisval = int(decis) * 10
issaftdec = str(decisval)
elif len(decis) == 2:
decisval = int(decis)
issaftdec = str(decisval)
else:
decisval = decis
issaftdec = str(decisval)
try:
int_issnum = (int(issb4dec) * 1000) + (int(issaftdec) * 10)
except ValueError:
logger.error(module + ' This has no issue # for me to get - Either a Graphic Novel or one-shot.')
updater.no_searchresults(comicid)
return
else:
try:
x = float(issnum)
#validity check
if x < 0:
logger.fdebug(module + ' I have encountered a negative issue #: ' + str(issnum) + '. Trying to accomodate.')
logger.fdebug(module + ' value of x is : ' + str(x))
int_issnum = (int(x) *1000) - 1
else: raise ValueError
except ValueError, e:
x = 0
tstord = None
issno = None
invchk = "false"
while (x < len(issnum)):
if issnum[x].isalpha():
#take first occurance of alpha in string and carry it through
tstord = issnum[x:].rstrip()
issno = issnum[:x].rstrip()
try:
isschk = float(issno)
except ValueError, e:
if len(issnum) == 1 and issnum.isalpha():
logger.fdebug(module + ' Detected lone alpha issue. Attempting to figure this out.')
break
logger.fdebug(module + ' Invalid numeric for issue - cannot be found. Ignoring.')
issno = None
tstord = None
invchk = "true"
break
x+=1
if tstord is not None and issno is not None:
a = 0
ordtot = 0
if len(issnum) == 1 and issnum.isalpha():
int_issnum = ord(tstord.lower())
else:
while (a < len(tstord)):
ordtot += ord(tstord[a].lower()) #lower-case the letters for simplicty
a+=1
int_issnum = (int(issno) * 1000) + ordtot
elif invchk == "true":
logger.fdebug(module + ' This does not have an issue # that I can parse properly.')
return
else:
logger.error(module + ' ' + str(issnum) + ' This has an alpha-numeric in the issue # which I cannot account for.')
return
int_issnum = helpers.issuedigits(issnum)
# if issnum.isdigit():
# int_issnum = int(issnum) * 1000
# else:
# if 'a.i.' in issnum.lower() or 'ai' in issnum.lower():
# issnum = re.sub('\.', '', issnum)
# #int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('i')
# if 'au' in issnum.lower():
# int_issnum = (int(issnum[:-2]) * 1000) + ord('a') + ord('u')
# elif 'inh' in issnum.lower():
# int_issnum = (int(issnum[:-4]) * 1000) + ord('i') + ord('n') + ord('h')
# elif 'now' in issnum.lower():
# int_issnum = (int(issnum[:-4]) * 1000) + ord('n') + ord('o') + ord('w')
# elif u'\xbd' in issnum:
# int_issnum = .5 * 1000
# logger.fdebug(module + ' 1/2 issue detected :' + issnum + ' === ' + str(int_issnum))
# elif u'\xbc' in issnum:
# int_issnum = .25 * 1000
# elif u'\xbe' in issnum:
# int_issnum = .75 * 1000
# elif u'\u221e' in issnum:
# #issnum = utf-8 will encode the infinity symbol without any help
# int_issnum = 9999999999 * 1000 # set 9999999999 for integer value of issue
# elif '.' in issnum or ',' in issnum:
# if ',' in issnum: issnum = re.sub(',', '.', issnum)
# issst = str(issnum).find('.')
# #logger.fdebug("issst:" + str(issst))
# if issst == 0:
# issb4dec = 0
# else:
# issb4dec = str(issnum)[:issst]
# #logger.fdebug("issb4dec:" + str(issb4dec))
# #if the length of decimal is only 1 digit, assume it's a tenth
# decis = str(issnum)[issst +1:]
# #logger.fdebug("decis:" + str(decis))
# if len(decis) == 1:
# decisval = int(decis) * 10
# issaftdec = str(decisval)
# elif len(decis) == 2:
# decisval = int(decis)
# issaftdec = str(decisval)
# else:
# decisval = decis
# issaftdec = str(decisval)
# try:
# int_issnum = (int(issb4dec) * 1000) + (int(issaftdec) * 10)
# except ValueError:
# logger.error(module + ' This has no issue # for me to get - Either a Graphic Novel or one-shot.')
# updater.no_searchresults(comicid)
# return
# else:
# try:
# x = float(issnum)
# #validity check
# if x < 0:
# logger.fdebug(module + ' I have encountered a negative issue #: ' + str(issnum) + '. Trying to accomodate.')
# logger.fdebug(module + ' value of x is : ' + str(x))
# int_issnum = (int(x) *1000) - 1
# else: raise ValueError
# except ValueError, e:
# x = 0
# tstord = None
# issno = None
# invchk = "false"
# while (x < len(issnum)):
# if issnum[x].isalpha():
# #take first occurance of alpha in string and carry it through
# tstord = issnum[x:].rstrip()
# issno = issnum[:x].rstrip()
# try:
# isschk = float(issno)
# except ValueError, e:
# if len(issnum) == 1 and issnum.isalpha():
# logger.fdebug(module + ' Detected lone alpha issue. Attempting to figure this out.')
# break
# logger.fdebug(module + ' Invalid numeric for issue - cannot be found. Ignoring.')
# issno = None
# tstord = None
# invchk = "true"
# break
# x+=1
# if tstord is not None and issno is not None:
# a = 0
# ordtot = 0
# if len(issnum) == 1 and issnum.isalpha():
# int_issnum = ord(tstord.lower())
# else:
# while (a < len(tstord)):
# ordtot += ord(tstord[a].lower()) #lower-case the letters for simplicty
# a+=1
# int_issnum = (int(issno) * 1000) + ordtot
# elif invchk == "true":
# logger.fdebug(module + ' This does not have an issue # that I can parse properly.')
# return
# else:
# logger.error(module + ' ' + str(issnum) + ' This has an alpha-numeric in the issue # which I cannot account for.')
# return
issuedata.append({"ComicID": comicid,
"IssueID": issid,
@ -545,22 +548,23 @@ class WebInterface(object):
issuePublisher = cid['Publisher']
break
newCtrl = {"IssueArcID": AD['IssueArcID'],
"StoryArcID": AD['StoryArcID']}
newVals = {"ComicID": AD['ComicID'],
"IssueID": AD['IssueID'],
"StoryArc": storyarcname,
"ComicName": AD['ComicName'],
"IssueName": IssueName,
"IssueNumber": AD['Issue_Number'],
"Publisher": storyarcpublisher,
"TotalIssues": storyarcissues,
"ReadingOrder": AD['ReadingOrder'],
"IssueDate": AD['IssueDate'],
"StoreDate": AD['ReleaseDate'],
"SeriesYear": seriesYear,
"IssuePublisher": issuePublisher,
"CV_ArcID": arcid}
newCtrl = {"IssueArcID": AD['IssueArcID'],
"StoryArcID": AD['StoryArcID']}
newVals = {"ComicID": AD['ComicID'],
"IssueID": AD['IssueID'],
"StoryArc": storyarcname,
"ComicName": AD['ComicName'],
"IssueName": IssueName,
"IssueNumber": AD['Issue_Number'],
"Publisher": storyarcpublisher,
"TotalIssues": storyarcissues,
"ReadingOrder": AD['ReadingOrder'],
"IssueDate": AD['IssueDate'],
"StoreDate": AD['ReleaseDate'],
"SeriesYear": seriesYear,
"IssuePublisher": issuePublisher,
"CV_ArcID": arcid,
"Int_IssueNumber": AD['Int_IssueNumber']}
myDB.upsert("readinglist", newVals, newCtrl)
@ -797,7 +801,6 @@ class WebInterface(object):
comicsToAdd = [ComicID]
logger.fdebug("Refreshing comic: %s" % comicsToAdd)
threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start()
#threading.Thread(target=self.refreshArtist, kwargs=kwargs).start()
refreshSeries.exposed = True
def refreshArtist(self, ComicID):
@ -1108,6 +1111,21 @@ class WebInterface(object):
id = chk_log['ID']
fullprov = chk_log['PROVIDER'] #the full newznab name if it exists will appear here as 'sitename (newznab)'
if all([ComicYear is not None, ComicYear != 'None']) and all([IssueID is not None, IssueID != 'None']):
getYear = myDB.selectone('SELECT IssueDate, ReleaseDate FROM Issues WHERE IssueID=?', [IssueID]).fetchone()
if getYear is None:
logger.warn('Unable to retrieve valid Issue Date for Retry of Issue (Try to refresh the series and then try again.')
return
if getYear['IssueDate'][:4] == '0000':
if getYear['ReleaseDate'][:4] == '0000':
logger.warn('Unable to retrieve valid Issue Date for Retry of Issue (Try to refresh the series and then try again.')
return
else:
ComicYear = getYear['ReleaseDate'][:4]
else:
ComicYear = getYear['IssueDate'][:4]
#now we break it down by provider to recreate the link.
#torrents first.
if Provider == '32P' or Provider == 'KAT':
@ -1142,7 +1160,6 @@ class WebInterface(object):
else:
modcomicname = ComicName + ' Annual'
comicinfo = []
comicinfo.append({"ComicName": ComicName,
"IssueNumber": IssueNumber,
@ -1193,12 +1210,13 @@ class WebInterface(object):
link = str(newznab_host) + 'getnzb/' + str(id) + '.nzb&i=' + str(newznab_uid) + '&r=' + str(newznab_api)
logger.info('newznab detected as : ' + str(newznab_info[0]) + ' @ ' + str(newznab_host))
logger.info('link : ' + str(link))
newznabinfo = (newznab_info[0], newznab_info[1], newznab_info[2], newznab_info[3])
break
else:
logger.error(str(newznab_info[0]) + ' is not enabled - unable to process retry request until provider is re-enabled.')
return
sendit = search.searcher(Provider, nzbname, comicinfo, link=link, IssueID=IssueID, ComicID=ComicID, tmpprov=fullprov, directsend=True)
sendit = search.searcher(Provider, nzbname, comicinfo, link=link, IssueID=IssueID, ComicID=ComicID, tmpprov=fullprov, directsend=True, newznab=newznabinfo)
retryissue.exposed = True
def queueit(self, **kwargs):
@ -2367,8 +2385,12 @@ class WebInterface(object):
#cycle through the story arcs here for matches on the watchlist
if sarc_title != arc['StoryArc']:
arcdir = helpers.filesafe(arc['StoryArc'])
if mylar.REPLACE_SPACES:
arcdir = arcdir.replace(' ', mylar.REPLACE_CHAR)
if mylar.STORYARCDIR:
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arc['StoryArc'])
dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arcdir)
else:
dstloc = os.path.join(mylar.DESTINATION_DIR, mylar.GRABBAG_DIR)
@ -3246,6 +3268,7 @@ class WebInterface(object):
"sab_cat": mylar.SAB_CATEGORY,
"sab_priority": mylar.SAB_PRIORITY,
"sab_directory": mylar.SAB_DIRECTORY,
"sab_to_mylar": helpers.checked(mylar.SAB_TO_MYLAR),
"nzbget_host": mylar.NZBGET_HOST,
"nzbget_port": mylar.NZBGET_PORT,
"nzbget_user": mylar.NZBGET_USERNAME,
@ -3550,7 +3573,7 @@ class WebInterface(object):
def configUpdate(self, comicvine_api=None, http_host='0.0.0.0', http_username=None, http_port=8090, http_password=None, enable_https=0, https_cert=None, https_key=None, api_enabled=0, api_key=None, launch_browser=0, auto_update=0, logverbose=0, annuals_on=0, max_logsize=None, download_scan_interval=None, nzb_search_interval=None, nzb_startup_search=0, libraryscan_interval=None,
nzb_downloader=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, log_dir=None, log_level=0, blackhole_dir=None,
nzb_downloader=0, sab_host=None, sab_username=None, sab_apikey=None, sab_password=None, sab_category=None, sab_priority=None, sab_directory=None, sab_to_mylar=0, log_dir=None, log_level=0, blackhole_dir=None,
nzbget_host=None, nzbget_port=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_directory=None,
usenet_retention=None, nzbsu=0, nzbsu_uid=None, nzbsu_apikey=None, dognzb=0, dognzb_apikey=None, newznab=0, newznab_host=None, newznab_name=None, newznab_apikey=None, newznab_uid=None, newznab_enabled=0,
raw=0, raw_provider=None, raw_username=None, raw_password=None, raw_groups=None, experimental=0, check_folder=None, enable_check_folder=0,
@ -3594,6 +3617,7 @@ class WebInterface(object):
mylar.SAB_APIKEY = sab_apikey
mylar.SAB_CATEGORY = sab_category
mylar.SAB_PRIORITY = sab_priority
mylar.SAB_TO_MYLAR = sab_to_mylar
mylar.SAB_DIRECTORY = sab_directory
mylar.NZBGET_HOST = nzbget_host
mylar.NZBGET_USERNAME = nzbget_username