mirror of https://github.com/evilhero/mylar
IMP: Added ForceRSS Check and Test SABnzbd Connection buttons in Config
FIX: If Annuals not enabled, would error on home screen
IMP: Updated mylar.init.d (thnx Kalinon)
FIX: Manual Post-Processing fix for Manual Run (thnx Kalinon)
IMP: Library Monitor working (check folder every X minutes and Post-Process)
IMP: Future Upcoming introduction
IMP: Experimental search better handling of year inclusions
FIX: Filechecker will now pick up series with years in the series title accordingly
FIX: Torrent seedbox sending would occasionally lock up when attempting to send the torrent file
FIX: Malformed image URL on some series
IMP: Moved issue updating to a separate function
IMP: When a series was refreshed, it would download the last issue (or a few issues, depending on the date) regardless of status
IMP: When a series is volume 1 or no volume label exists, either assume V1 or remove the volume requirements to improve matching hits
IMP: StoryArcs will now check the StoryArc folder for existing issues and change their status in the StoryArc accordingly
This commit is contained in:
parent eace56d92e
commit f1e7b9d50e
@@ -239,6 +239,9 @@
 %endfor
 </select>
 </div>
+<div class="row">
+<a href="#" style="float:right" type="button" onclick="doAjaxCall('SABtest',$(this))" data-success="Successfully tested SABnzbd connection" data-error="Error testing SABnzbd connection"><span class="ui-icon ui-icon-extlink"></span>Test SABnzbd</a>
+</div>

 </div>
 <div class="row checkbox">

@@ -368,6 +371,7 @@
 <div class="config">
 <label>RSS Interval Feed Check</label>
 <input type="text" name="rss_checkinterval" value="${config['rss_checkinterval']}" size="6" /><small>(Mins)</small>
+<a href="#" style="float:right" type="button" onclick="doAjaxCall('force_rss',$(this))" data-success="RSS Force now running" data-error="Error trying to retrieve RSS Feeds"><span class="ui-icon ui-icon-extlink"></span>Force RSS</a>
 </div>
 </fieldset>
 <fieldset>
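Both new buttons post back through doAjaxCall, so each implies an exposed CherryPy method on WebInterface. Those handlers are not part of this excerpt; a minimal sketch of the wiring they imply follows — only the method names (taken from the onclick strings above) are confirmed, the bodies are assumptions:

    import threading
    from mylar import rsscheck

    class WebInterface(object):

        def SABtest(self):
            # hypothetical body: probe the configured SABnzbd host/apikey and
            # raise on failure so doAjaxCall falls through to its data-error text
            pass
        SABtest.exposed = True

        def force_rss(self):
            # forcerss=True bypasses the RSS_LASTRUN throttle added to
            # rsscheck.tehMain() later in this commit; run it off-thread so
            # the AJAX call returns immediately
            threading.Thread(target=rsscheck.tehMain,
                             kwargs={'forcerss': True}).start()
        force_rss.exposed = True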
@@ -0,0 +1,104 @@
+<%inherit file="base.html"/>
+<%!
+import mylar
+from mylar import db
+from mylar.helpers import checked
+
+%>
+
+<%def name="headerIncludes()">
+<div id="subhead_container">
+<div id="subhead_menu">
+<a id="menu_link_refresh" href="futurepull">Recreate/Refresh Pull-list</a>
+</div>
+</div>
+<a href="home" class="back">« Back to overview</a>
+</%def>
+
+<%def name="body()">
+<div class="clearfix">
+<h1>Upcoming Pull list</h1>
+</div>
+
+<div class="table_wrapper">
+
+<table class="display" id="pull_table">
+<thead>
+<tr>
+<th id="publishdate">Date</th>
+<th id="publisher">Publisher</th>
+<th id="comicname">Comic</th>
+<th id="comicnumber">Number</th>
+<th id="status">Status</th>
+</tr>
+</thead>
+<tbody>
+%for future in futureresults:
+<tr>
+%if pullfilter is True:
+<td class="publishdate">${future['SHIPDATE']}</td>
+<td class="publisher">${future['PUBLISHER']}</td>
+<td class="comicname">${future['COMIC']}
+%if future['EXTRA'] != '':
+(${future['EXTRA']})
+%endif
+</td>
+<td class="comicnumber">${future['ISSUE']}</td>
+<td class="status">${future['STATUS']}
+%if future['STATUS'] == 'Wanted':
+<a href="unqueueissue?IssueID=None&ComicID=${future['COMICID']}&ComicName=${future['COMIC'] | u}&Issue=${future['ISSUE']}&FutureID=${future['FUTUREID']}"><span class="ui-icon ui-icon-plus"></span>UnWant</a>
+%elif future['STATUS'] == 'Skipped':
+<a href="searchit?name=${future['COMIC'] | u}&issue=${future['ISSUE']}&mode=pullseries"><span class="ui-icon ui-icon-plus"></span>add series</a>
+<a href="queueissue?ComicName=${future['COMIC'] | u}&ComicIssue=${future['ISSUE']}&mode=pullwant"><span class="ui-icon ui-icon-plus"></span>one off</a>
+<a href="add2futurewatchlist?ComicName=${future['COMIC'] | u}&Issue=${future['ISSUE']}&Publisher=${future['PUBLISHER']}&ShipDate=${future['SHIPDATE']}&FutureID=${future['FUTUREID']}"><span class="ui-icon ui-icon-plus"></span>Watch For</a>
+%endif
+</td>
+%endif
+</tr>
+%endfor
+</tbody>
+</table>
+</div>
+</%def>
+
+<%def name="headIncludes()">
+<link rel="stylesheet" href="interfaces/default/css/data_table.css">
+</%def>
+
+<%def name="javascriptIncludes()">
+<script src="js/libs/jquery.dataTables.min.js"></script>
+<script>
+
+function initThisPage() {
+
+    initActions();
+    $('#pull_table').dataTable (
+        {
+            "bDestroy": true,
+            //"aoColumnDefs": [
+            //  { 'bSortable': false, 'aTargets': [ 2,3 ] }
+            //],
+            "aLengthMenu": [[10, 25, 50, -1], [10, 25, 50, 'All' ]],
+            "oLanguage": {
+                "sLengthMenu":"Show _MENU_ issues per page",
+                "sEmptyTable": "No issue information available",
+                "sInfo":"Showing _TOTAL_ issues",
+                "sInfoEmpty":"Showing 0 to 0 of 0 issues",
+                "sInfoFiltered":"(filtered from _MAX_ total issues)",
+                "sSearch": ""},
+            "bStateSave": true,
+            "iDisplayLength": 25,
+            "sPaginationType": "full_numbers",
+            "aaSorting": [[0, 'desc'],[1, 'asc']]
+        });
+    resetFilters("future");
+    setTimeout(function(){
+        initFancybox();
+    },1500)
+}

+$(document).ready(function() {
+    initThisPage();
+});
+</script>
+</%def>
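The template above expects futureresults rows and a pullfilter flag from its page handler, which is not included in this diff. A sketch of the handler's likely shape — the method name is an assumption, while 'future' is the table the new solicit module (later in this commit) creates, and serve_template matches the webserve.py convention shown at the end of this commit:

    # sketch only: the real route in webserve.py is not part of this excerpt
    def futureupcoming(self):
        myDB = db.DBConnection()
        futureresults = myDB.select("SELECT * FROM future ORDER BY SHIPDATE DESC")
        return serve_template(templatename="futurepull.html",
                              title="Upcoming Pull list",
                              futureresults=futureresults,
                              pullfilter=True)
    futureupcoming.exposed = True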
@@ -15,7 +15,7 @@
 <%def name="body()">

 <div id="paddingheader">
-<h1 class="clearfix">Borg Importing Results</h1>
+<h1 class="clearfix">Ultron Import Results</h1>
 </div>
 <div id="tabs">
 <ul>

@@ -25,7 +25,9 @@

 <table>
 <tr>
 <fieldset>
+<span style="position:right">
+<img src="interfaces/default/images/ultron.png" style="float:right" height="125" width="125" />
 <fieldset>
 <div class="row checkbox">
 <input type="checkbox" name="autoadd" style="vertical-align: middle; margin: 3px; margin-top: -1px;" id="autoadd" value="1" ${checked(mylar.ADD_COMICS)}><label>Auto-add new series</label>
 </div>

@@ -43,6 +45,7 @@
 </div>

 </fieldset>
+</span>
 </tr>
 </table>
 </div>
@@ -26,16 +26,24 @@
 myDB = db.DBConnection()
 issue = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comic['ComicID']])
 wantedc = myDB.action("SELECT COUNT(*) as count FROM issues WHERE ComicID=? AND Status='Wanted'", [comic['ComicID']]).fetchone()
-anwantedc = myDB.action("SELECT COUNT(*) as count FROM annuals WHERE ComicID=? AND Status='Wanted'", [comic['ComicID']]).fetchone()
-annual = myDB.action("SELECT COUNT(*) as count FROM annuals WHERE ComicID=?", [comic['ComicID']]).fetchone()
+if annuals_on:
+    anwantedc = myDB.action("SELECT COUNT(*) as count FROM annuals WHERE ComicID=? AND Status='Wanted'", [comic['ComicID']]).fetchone()
+    annual = myDB.action("SELECT COUNT(*) as count FROM annuals WHERE ComicID=?", [comic['ComicID']]).fetchone()
+else:
+    anwantedc = 0
+    annual = 0
 archedc = myDB.action("SELECT COUNT(*) as count FROM issues WHERE ComicID=? AND Status='Archived'", [comic['ComicID']]).fetchone()
-annualcount = annual[0]
-if not annualcount:
+if annuals_on:
+    annualcount = annual[0]
+    if not annualcount:
+        annualcount = 0
+    anns = anwantedc[0]
+else:
+    annualcount = 0
+    anns = 0
 totaltracks = comic['Total'] + annualcount
 havetracks = comic['Have']
 wants = wantedc[0]
-anns = anwantedc[0]
 arcs = archedc[0]
 if not havetracks:
     havetracks = 0
@@ -41,7 +41,7 @@ test -x $DAEMON || exit 0
 set -e

 # Create PID if missing and remove stale PID file
-if [ ! =d $PID_PATH ]; then
+if [ ! -d $PID_PATH ]; then
     mkdir -p $PID_PATH
     chown $RUN_AS $PID_PATH
 fi
@@ -141,7 +141,9 @@ class PostProcessor(object):
        logger.fdebug("nzb name: " + str(self.nzb_name))
        logger.fdebug("nzb folder: " + str(self.nzb_folder))
        if mylar.USE_SABNZBD==0:
-           logger.fdebug("Not using SABNzbd")
+           logger.fdebug("Not using SABnzbd")
+       elif mylar.USE_SABNZBD != 0 and self.nzb_name == 'Manual Run':
+           logger.fdebug('Not using SABnzbd : Manual Run')
        else:
            # if the SAB Directory option is enabled, let's use that folder name and append the jobname.
            if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY is not 'None' and len(mylar.SAB_DIRECTORY) > 4:

@@ -197,7 +199,7 @@ class PostProcessor(object):
            watchmatch = filechecker.listFiles(self.nzb_folder,cs['ComicName'],cs['AlternateSearch'], manual=watchvals)
            if watchmatch is None:
                nm+=1
-               pass
+               continue
            else:
                fn = 0
                fccnt = int(watchmatch['comiccount'])

@@ -659,6 +661,7 @@ class PostProcessor(object):
                ofilename = filename
                path, ext = os.path.splitext(ofilename)
            else:
                otofilename = ml['ComicLocation']
+               print "otofilename:" + str(otofilename)
                odir, ofilename = os.path.split(otofilename)
                print "ofilename: " + str(ofilename)
@@ -104,6 +104,7 @@ SEARCH_INTERVAL = 360
 NZB_STARTUP_SEARCH = False
 LIBRARYSCAN_INTERVAL = 300
 DOWNLOAD_SCAN_INTERVAL = 5
+CHECK_FOLDER = None
 INTERFACE = None

 PREFERRED_QUALITY = 0

@@ -319,7 +320,7 @@ def initialize():
        ENABLE_TORRENTS, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \
        ENABLE_RSS, RSS_CHECKINTERVAL, RSS_LASTRUN, ENABLE_TORRENT_SEARCH, ENABLE_KAT, KAT_PROXY, ENABLE_CBT, CBT_PASSKEY, \
        PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, BOXCAR_ENABLED, BOXCAR_USERNAME, BOXCAR_ONSNATCH, LOCMOVE, NEWCOM_DIR, FFTONEWCOM_DIR, \
-       PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, STORYARCDIR, CVURL, CVAPIFIX, \
+       PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, STORYARCDIR, CVURL, CVAPIFIX, CHECK_FOLDER, \
        COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER, WEEKFOLDER

    if __INITIALIZED__:

@@ -375,6 +376,7 @@ def initialize():
        IMP_RENAME = bool(check_setting_int(CFG, 'General', 'imp_rename', 0))
        IMP_METADATA = bool(check_setting_int(CFG, 'General', 'imp_metadata', 0))
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, 'General', 'download_scan_interval', 5)
+       CHECK_FOLDER = check_setting_str(CFG, 'General', 'check_folder', '')
        INTERFACE = check_setting_str(CFG, 'General', 'interface', 'default')
        AUTOWANT_ALL = bool(check_setting_int(CFG, 'General', 'autowant_all', 0))
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, 'General', 'autowant_upcoming', 1))

@@ -867,6 +869,7 @@ def config_write():
    new_config['General']['imp_rename'] = int(IMP_RENAME)
    new_config['General']['imp_metadata'] = int(IMP_METADATA)
    new_config['General']['download_scan_interval'] = DOWNLOAD_SCAN_INTERVAL
+   new_config['General']['check_folder'] = CHECK_FOLDER
    new_config['General']['interface'] = INTERFACE
    new_config['General']['autowant_all'] = int(AUTOWANT_ALL)
    new_config['General']['autowant_upcoming'] = int(AUTOWANT_UPCOMING)

@@ -1036,11 +1039,13 @@ def start():
        # Start our scheduled background tasks
        #from mylar import updater, searcher, librarysync, postprocessor
-       from mylar import updater, search, weeklypull
+       from mylar import updater, search, weeklypull, PostProcessor

        SCHED.add_interval_job(updater.dbUpdate, hours=48)
        SCHED.add_interval_job(search.searchforissue, minutes=SEARCH_INTERVAL)

+       helpers.latestdate_fix()
+
        #initiate startup rss feeds for torrents/nzbs here...
        if ENABLE_RSS:
            SCHED.add_interval_job(rsscheck.tehMain, minutes=int(RSS_CHECKINTERVAL))

@@ -1064,7 +1069,11 @@ def start():
        if CHECK_GITHUB:
            SCHED.add_interval_job(versioncheck.checkGithub, minutes=CHECK_GITHUB_INTERVAL)

        #SCHED.add_interval_job(postprocessor.checkFolder, minutes=DOWNLOAD_SCAN_INTERVAL)
+       #run checkFolder every X minutes (basically Manual Run Post-Processing)
+       logger.info('CHECK_FOLDER SET TO: ' + str(CHECK_FOLDER))
+       if CHECK_FOLDER:
+           logger.info('Setting monitor on folder : ' + str(CHECK_FOLDER))
+           SCHED.add_interval_job(helpers.checkFolder, minutes=int(DOWNLOAD_SCAN_INTERVAL))

        SCHED.start()

@@ -1087,7 +1096,7 @@ def dbcheck():
    c.execute('CREATE TABLE IF NOT EXISTS readinglist(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS annuals (IssueID TEXT, Issue_Number TEXT, IssueName TEXT, IssueDate TEXT, Status TEXT, ComicID TEXT, GCDComicID TEXT, Location TEXT, ComicSize TEXT, Int_IssueNumber INT, ComicName TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS rssdb (Title TEXT UNIQUE, Link TEXT, Pubdate TEXT, Site TEXT, Size TEXT)')

+   c.execute('CREATE TABLE IF NOT EXISTS futureupcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Publisher TEXT, Status TEXT, DisplayComicName TEXT)')
    conn.commit
    c.close
    #new
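Taken together, these hunks wire one new setting (CHECK_FOLDER) from default through config load, config write, and scheduling. The scheduler call is APScheduler 2.x, whose add_interval_job the start() hunk uses directly; a standalone sketch of the same monitor pattern (assuming APScheduler 2.x is installed — the checkFolder body here is a stand-in, the real one lives in helpers.py below):

    from time import sleep
    from apscheduler.scheduler import Scheduler

    def checkFolder():
        # stand-in for helpers.checkFolder: scan the watch folder and post-process
        print 'scanning watch folder for snatched downloads...'

    sched = Scheduler()
    sched.add_interval_job(checkFolder, minutes=5)   # DOWNLOAD_SCAN_INTERVAL
    sched.start()
    while True:
        sleep(60)   # keep the interpreter alive; mylar's main loop does this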
@@ -88,14 +88,55 @@ def listFiles(dir,watchcomic,AlternateSearch=None,manual=None,sarc=None):
                subname = re.sub(subit, '', subname)
                volrem = subit

+       #check if a year is present in series title (ie. spider-man 2099)
+       numberinseries = 'False'
+
+       for i in watchcomic.split():
+           if ('20' in i or '19' in i):
+               if i.isdigit():
+                   numberinseries = 'True'
+               else:
+                   find20 = i.find('20')
+                   if find20:
+                       stf = i[find20:4].strip()
+                   find19 = i.find('19')
+                   if find19:
+                       stf = i[find19:4].strip()
+                   logger.fdebug('stf is : ' + str(stf))
+                   if stf.isdigit():
+                       numberinseries = 'True'
+
+       logger.fdebug('numberinseries: ' + numberinseries)
+
        #remove the brackets..
        subnm = re.findall('[^()]+', subname)
-       if len(subnm):
-           logger.fdebug("detected invalid filename - attempting to detect year to continue")
-           subname = re.sub('(.*)\s+(19\d{2}|20\d{2})(.*)', '\\1 (\\2) \\3', subname)
-           subnm = re.findall('[^()]+', subname)
+       logger.fdebug('subnm len : ' + str(len(subnm)))
+       if len(subnm) == 1:
+           logger.fdebug(str(len(subnm)) + ': detected invalid filename - attempting to detect year to continue')
+           #if the series has digits this f's it up.
+           if numberinseries == 'True':
+               #we need to remove the series from the subname and then search the remainder.
+               watchname = re.sub('[-\:\;\!\'\/\?\+\=\_\%\.]', '', watchcomic) #remove spec chars for watchcomic match.
+               logger.fdebug('watch-cleaned: ' + str(watchname))
+               subthis = re.sub('.cbr', '', subname)
+               subthis = re.sub('.cbz', '', subthis)
+               subthis = re.sub('[-\:\;\!\'\/\?\+\=\_\%\.]', '', subthis)
+               logger.fdebug('sub-cleaned: ' + str(subthis))
+               subthis = subthis[len(watchname):] #remove watchcomic
+               #we need to now check the remainder of the string for digits assuming it's a possible year
+               logger.fdebug('new subname: ' + str(subthis))
+               subname = re.sub('(.*)\s+(19\d{2}|20\d{2})(.*)', '\\1 (\\2) \\3', subthis)
+               subname = watchcomic + subname
+               subnm = re.findall('[^()]+', subname)
+           else:
+               subname = re.sub('(.*)\s+(19\d{2}|20\d{2})(.*)', '\\1 (\\2) \\3', subname)
+               subnm = re.findall('[^()]+', subname)

        subname = subnm[0]

-       if len(subnm):
-           # if it still has no year (brackets), check setting and either assume no year needed.
-           subname = subname
        logger.fdebug('subname no brackets: ' + str(subname))
        subname = re.sub('\_', ' ', subname)
        nonocount = 0

@@ -180,7 +221,7 @@ def listFiles(dir,watchcomic,AlternateSearch=None,manual=None,sarc=None):
            AS_Alternate = re.sub('##','',calt)
            #same = encode.
            u_altsearchcomic = AS_Alternate.encode('ascii', 'ignore').strip()
-           altsearchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\+\'\?\@]', '', u_altsearchcomic)
+           altsearchcomic = re.sub('[\_\#\,\/\:\;\.\-\!\$\%\+\'\?\@]', ' ', u_altsearchcomic)
            altsearchcomic = re.sub('\&', ' and ', altsearchcomic)
            altsearchcomic = re.sub('\s+', ' ', str(altsearchcomic)).strip()
            AS_Alt.append(altsearchcomic)

@@ -349,6 +390,7 @@ def listFiles(dir,watchcomic,AlternateSearch=None,manual=None,sarc=None):
            logger.fdebug('final justthedigits [' + justthedigits + ']')
            if digitsvalid == "false":
                logger.fdebug('Issue number not properly detected...ignoring.')
+               comiccnt -=1  # remove the entry from the list count as it was incorrectly tallied.
                continue
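The branch above hinges on one substitution that wraps a bare 19xx/20xx token in parentheses so the rest of the parser can treat it as a year. A worked example of why titles that themselves contain a year needed the special case (sketch, Python 2 to match the codebase):

    import re

    # the bracket-insertion rewrite used by listFiles above
    yearpat = '(.*)\s+(19\d{2}|20\d{2})(.*)'

    print re.sub(yearpat, '\\1 (\\2) \\3', 'Batman 01 2013')
    # -> 'Batman 01 (2013) '      (trailing year correctly bracketed)
    print re.sub(yearpat, '\\1 (\\2) \\3', 'Spider-Man 2099 01')
    # -> 'Spider-Man (2099)  01'  (series year mistaken for a cover year)

Hence the numberinseries path: strip the watch title out first, bracket whatever year remains, then prepend the title back.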
@@ -93,6 +93,7 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion):
    for title, link in keyPair.items():
        #logger.fdebug("titlesplit: " + str(title.split("\"")))
        splitTitle = title.split("\"")
+       noYear = 'False'

        for subs in splitTitle:
            logger.fdebug(subs)

@@ -108,11 +109,23 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion):
            #       'title': subs,
            #       'link': str(link)
            #       })
-           entries.append({
-                   'title': subs,
-                   'link': str(link)
-                   })
+           if searchYear not in subs:
+               noYear = 'True'
+               noYearline = subs
+
+           if searchYear in subs and noYear == 'True':
+               #this would occur on the next check in the line, if year exists and
+               #the noYear check in the first check came back valid append it
+               subs = noYearline + ' (' + searchYear + ')'
+               noYear = 'False'
+
+           if noYear == 'False':
+
+               entries.append({
+                       'title': subs,
+                       'link': str(link)
+                       })
+               break  # break out so we don't write more shit.

    #       if len(entries) >= 1:
        if tallycount >= 1:
@@ -45,7 +45,23 @@ def putfile(localpath,file):  #localpath=full path to .torrent (including file)
        logger.fdebug('Skipping file at this time.')
        return "fail"

-   sftp.put(localpath, rempath)
+   sendcheck = False
+
+   while sendcheck == False:
+       try:
+           sftp.put(localpath, rempath)
+           sendcheck = True
+       except Exception, e:
+           logger.fdebug('ERROR Sending torrent to seedbox *** Caught exception: %s: %s' % (e.__class__, e))
+           logger.fdebug('Forcibly closing connection and attempting to reconnect')
+           sftp.close()
+           transport.close()
+           #reload the transport here cause it locked up previously.
+           transport = paramiko.Transport((host, port))
+           transport.connect(username = username, password = password)
+           sftp = paramiko.SFTPClient.from_transport(transport)
+           logger.fdebug('successfully reconnected via sftp - attempting to resend.')
+           #return "fail"

    sftp.close()
    transport.close()
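One caveat worth noting: while sendcheck == False retries forever if the seedbox never recovers. A bounded variant of the same recover-and-resend idea, reusing the names from the function above (a sketch, not what the commit ships):

    attempts = 0
    while attempts < 3:
        try:
            sftp.put(localpath, rempath)
            break                      # sent - stop retrying
        except Exception, e:
            attempts += 1
            logger.fdebug('send failed (%s) - reconnecting, attempt %d' % (e, attempts))
            sftp.close()
            transport.close()
            # rebuild the transport, exactly as the loop above does
            transport = paramiko.Transport((host, port))
            transport.connect(username=username, password=password)
            sftp = paramiko.SFTPClient.from_transport(transport)
    else:
        return "fail"                  # gave up after three attempts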
@@ -788,3 +788,48 @@ def renamefile_readingorder(readorder):
    else: readord = str(readorder)

    return readord

+def latestdate_fix():
+   import db, logger
+   datefix = []
+   myDB = db.DBConnection()
+   comiclist = myDB.action('SELECT * FROM comics')
+   if comiclist is None:
+       logger.fdebug('No Series in watchlist to correct latest date')
+       return
+   for cl in comiclist:
+       latestdate = cl['LatestDate']
+       #logger.fdebug("latestdate: " + str(latestdate))
+       if latestdate[8:] == '':
+           #logger.fdebug("invalid date " + str(latestdate) + " appending 01 for day to avoid errors")
+           if len(latestdate) <= 7:
+               finddash = latestdate.find('-')
+               #logger.info('dash found at position ' + str(finddash))
+               if finddash != 4:  #format of mm-yyyy
+                   lat_month = latestdate[:finddash]
+                   lat_year = latestdate[finddash+1:]
+               else:  #format of yyyy-mm
+                   lat_month = latestdate[finddash+1:]
+                   lat_year = latestdate[:finddash]
+
+               latestdate = (lat_year) + '-' + str(lat_month) + '-01'
+               datefix.append({"comicid": cl['ComicID'],
+                               "latestdate": latestdate})
+               #logger.info('latest date: ' + str(latestdate))
+
+   #now we fix.
+   if len(datefix) > 0:
+       for df in datefix:
+           newCtrl = {"ComicID": df['comicid']}
+           newVal = {"LatestDate": df['latestdate']}
+           myDB.upsert("comics", newVal, newCtrl)
+   return
+
+def checkFolder():
+   import PostProcessor, logger
+   #monitor a selected folder for 'snatched' files that haven't been processed
+   logger.info('Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
+   PostProcess = PostProcessor.PostProcessor('Manual Run', mylar.CHECK_FOLDER)
+   result = PostProcess.Process()
+   logger.info('Finished checking for newly snatched downloads')
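latestdate_fix only touches dates that are missing a day component, and the dash position decides which half is the year. The two shapes it normalizes, restated as a standalone function with the same branch logic (sketch):

    def normalize_latestdate(latestdate):
        # same branch logic as latestdate_fix above
        finddash = latestdate.find('-')
        if finddash != 4:   # mm-yyyy
            lat_month, lat_year = latestdate[:finddash], latestdate[finddash+1:]
        else:               # yyyy-mm
            lat_month, lat_year = latestdate[finddash+1:], latestdate[:finddash]
        return lat_year + '-' + lat_month + '-01'

    assert normalize_latestdate('2013-05') == '2013-05-01'   # yyyy-mm
    assert normalize_latestdate('05-2013') == '2013-05-01'   # mm-yyyy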
@@ -20,6 +20,7 @@ import shlex
 import datetime
 import re
 import urllib
+import urllib2
 import shutil
 import sqlite3
 import cherrypy

@@ -42,7 +43,7 @@ def is_exists(comicid):
    return False


-def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
+def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
    # from mylar import cache

@@ -353,24 +354,43 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):

    coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg")

    #try:
-   urllib.urlretrieve(str(comic['ComicImage']), str(coverfile))
+   #if cover has '+' in url it's malformed, we need to replace '+' with '%20' to retrieve properly.
+   #thisci = urllib.quote_plus(str(comic['ComicImage']))
+
+   #urllib.urlretrieve(str(thisci), str(coverfile))
+
    try:
-       with open(str(coverfile)) as f:
-           PRComicImage = os.path.join('cache',str(comicid) + ".jpg")
-           ComicImage = helpers.replacetheslash(PRComicImage)
+       cimage = re.sub('[\+]','%20', comic['ComicImage'])
+       request = urllib2.Request(cimage)#, headers={'Content-Type': 'application/x-www-form-urlencoded'})
+       #request.add_header('User-Agent', str(mylar.USER_AGENT))

+       response = urllib2.urlopen(request)

+       com_image = response.read()

+       with open(coverfile, 'wb') as the_file:
+           the_file.write(com_image)
+
+       logger.info('Successfully retrieved cover for ' + comic['ComicName'])
+
+   except Exception, e:
+       logger.warn('[%s] Error fetching data using : %s' % (e, comic['ComicImage']))
+
+   PRComicImage = os.path.join('cache',str(comicid) + ".jpg")
+   ComicImage = helpers.replacetheslash(PRComicImage)
+
+   #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
+   #without breaking the normal flow for inside the LAN (above)
+   #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"
+
-       logger.info('Sucessfully retrieved cover for ' + comic['ComicName'])
-       #if the comic cover local is checked, save a cover.jpg to the series folder.
-       if mylar.COMIC_COVER_LOCAL:
-           comiclocal = os.path.join(comlocation,'cover.jpg')
-           shutil.copy(ComicImage,comiclocal)
-   except IOError as e:
-       logger.error('Unable to save cover locally at this time.')
+   #if the comic cover local is checked, save a cover.jpg to the series folder.
+   if mylar.COMIC_COVER_LOCAL:
+       try:
+           comiclocal = os.path.join(comlocation,'cover.jpg')
+           shutil.copy(ComicImage,comiclocal)
+       except IOError as e:
+           logger.error('Unable to save cover into series directory at this time.')

    if oldcomversion is None:
        if comic['ComicVersion'].isdigit():

@@ -419,6 +439,7 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):
    issnum = []
    issname = []
    issdate = []
+   issuedata = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"

@@ -678,49 +699,72 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):
                logger.error(str(issnum) + ' this has an alpha-numeric in the issue # which I cannot account for.')
                return
            #get the latest issue / date using the date.
            logger.info('latest date: ' + str(latestdate))
            logger.info('first date: ' + str(firstdate))
            logger.info('issue date: ' + str(firstval['Issue_Date']))
            if firstval['Issue_Date'] > latestdate:
                latestiss = issnum
                latestdate = str(firstval['Issue_Date'])
            if firstval['Issue_Date'] < firstdate:
                firstiss = issnum
                firstdate = str(firstval['Issue_Date'])
-           # check if the issue already exists
-           iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()
+#          # check if the issue already exists
+#          iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()

-           # Only change the status & add DateAdded if the issue is already in the database
-           if iss_exists is None:
-               newValueDict['DateAdded'] = helpers.today()
+#          # Only change the status & add DateAdded if the issue is already in the database
+#          if iss_exists is None:
+#              newValueDict['DateAdded'] = helpers.today()

-           controlValueDict = {"IssueID": issid}
-           newValueDict = {"ComicID": comicid,
-                           "ComicName": comic['ComicName'],
-                           "IssueName": issname,
-                           "Issue_Number": issnum,
-                           "IssueDate": issdate,
-                           "Int_IssueNumber": int_issnum
-                           }
+#          controlValueDict = {"IssueID": issid}
+#          newValueDict = {"ComicID": comicid,
+#                          "ComicName": comic['ComicName'],
+#                          "IssueName": issname,
+#                          "Issue_Number": issnum,
+#                          "IssueDate": issdate,
+#                          "Int_IssueNumber": int_issnum
+#                          }

-           if iss_exists:
-               #print ("Existing status : " + str(iss_exists['Status']))
-               newValueDict['Status'] = iss_exists['Status']
-           else:
-               #print "issue doesn't exist in db."
-               if mylar.AUTOWANT_ALL:
-                   newValueDict['Status'] = "Wanted"
-               elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
-                   newValueDict['Status'] = "Wanted"
-               else:
-                   newValueDict['Status'] = "Skipped"
+           issuedata.append({"ComicID": comicid,
+                             "IssueID": issid,
+                             "ComicName": comic['ComicName'],
+                             "IssueName": issname,
+                             "Issue_Number": issnum,
+                             "IssueDate": issdate,
+                             "Int_IssueNumber": int_issnum})
+
+           #logger.info('issuedata: ' + str(issuedata))
+
+#          if iss_exists:
+#              print ("Existing status : " + str(iss_exists['Status']))
+#              newValueDict['Status'] = iss_exists['Status']
+#          else:
+#              print "issue doesn't exist in db."
+#              if mylar.AUTOWANT_ALL:
+#                  newValueDict['Status'] = "Wanted"
+#              elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
+#                  newValueDict['Status'] = "Wanted"
+#              else:
+#                  newValueDict['Status'] = "Skipped"
+
+#          try:
+#              myDB.upsert("issues", newValueDict, controlValueDict)
+#          except sqlite3.InterfaceError, e:
+#              #raise sqlite3.InterfaceError(e)
+#              logger.error('Something went wrong - I cannot add the issue information into my DB.')
+#              myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
+#              return

-           try:
-               myDB.upsert("issues", newValueDict, controlValueDict)
-           except sqlite3.InterfaceError, e:
-               #raise sqlite3.InterfaceError(e)
-               logger.error('Something went wrong - I cannot add the issue information into my DB.')
-               myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
-               return
            n+=1

+   if len(issuedata) > 1 and not calledfrom == 'dbupdate':
+       logger.fdebug('initiating issue updating - info & status')
+       issue_collection(issuedata,nostatus='False')
+   else:
+       logger.fdebug('initiating issue updating - just the info')
+       issue_collection(issuedata,nostatus='True')
+
+   #issue_collection(issuedata,nostatus='False')

    #figure publish dates here...
    styear = str(SeriesYear)
    #if SeriesYear == '0000':

@@ -777,8 +821,16 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):
        logger.info('Mass import - Moving not Enabled. Setting Archived Status for import.')
        moveit.archivefiles(comicid,ogcname)

+   if calledfrom == 'dbupdate':
+       logger.info('returning to dbupdate module')
+       return
+
    #check for existing files...
+   statbefore = myDB.action("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
+   logger.info('status before chk :' + statbefore['Status'])
    updater.forceRescan(comicid)
+   statafter = myDB.action("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
+   logger.info('status after chk :' + statafter['Status'])

    if pullupd is None:
        # lets' check the pullist for anything at this time as well since we're here.

@@ -786,8 +838,8 @@ def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None,calledfrom=None):
    if mylar.AUTOWANT_UPCOMING and lastpubdate == 'Present': #and 'Present' in gcdinfo['resultPublished']:
        logger.fdebug('latestissue: #' + str(latestiss))
        chkstats = myDB.action("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [comicid,str(latestiss)]).fetchone()
-       logger.fdebug(chkstats['Status'])
-       if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted' or chkstats['Status'] == 'Snatched':
+       logger.fdebug('latestissue status: ' + chkstats['Status'])
+       if chkstats['Status'] == 'Skipped' or chkstats['Status'] == 'Wanted': # or chkstats['Status'] == 'Snatched':
            logger.info('Checking this week pullist for new issues of ' + comic['ComicName'])
            updater.newpullcheck(comic['ComicName'], comicid)

@@ -1147,3 +1199,55 @@ def GCDimport(gcomicid, pullupd=None,imported=None,ogcname=None):

    logger.info(u"Finished grabbing what I could.")


+def issue_collection(issuedata,nostatus):
+   myDB = db.DBConnection()
+
+   logger.info('issue collection...')
+   if issuedata:
+       logger.info('issuedata exists')
+       for issue in issuedata:
+
+           controlValueDict = {"IssueID": issue['IssueID']}
+           newValueDict = {"ComicID": issue['ComicID'],
+                           "ComicName": issue['ComicName'],
+                           "IssueName": issue['IssueName'],
+                           "Issue_Number": issue['Issue_Number'],
+                           "IssueDate": issue['IssueDate'],
+                           "Int_IssueNumber": issue['Int_IssueNumber']
+                           }
+
+           if nostatus == 'False':
+               logger.info('issue')
+               # check if the issue already exists
+               iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issue['IssueID']]).fetchone()
+
+               # Only change the status & add DateAdded if the issue is already in the database
+               if iss_exists is None:
+                   newValueDict['DateAdded'] = helpers.today()
+                   print "issue doesn't exist in db."
+                   if mylar.AUTOWANT_ALL:
+                       newValueDict['Status'] = "Wanted"
+                   elif issue['IssueDate'] > helpers.today() and mylar.AUTOWANT_UPCOMING:
+                       newValueDict['Status'] = "Wanted"
+                   else:
+                       newValueDict['Status'] = "Skipped"
+
+               else:
+                   print ("Existing status : " + str(iss_exists['Status']))
+                   newValueDict['Status'] = iss_exists['Status']
+
+           else:
+               print ("Not changing the status at this time - reverting to previous module after to re-append existing status")
+               newValueDict['Status'] = "Skipped"
+
+           try:
+               myDB.upsert("issues", newValueDict, controlValueDict)
+           except sqlite3.InterfaceError, e:
+               #raise sqlite3.InterfaceError(e)
+               logger.error('Something went wrong - I cannot add the issue information into my DB.')
+               myDB.action("DELETE FROM comics WHERE ComicID=?", [issue['ComicID']])
+               return
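The net effect of this rework is a two-phase write: addComictoDB now accumulates rows into issuedata and hands them to issue_collection, and a refresh driven by the updater opts out of status writes entirely. The contract in miniature (a sketch; it mirrors the updater.py hunks later in this commit):

    # refresh path: write issue info only, leave statuses for the caller
    mylar.importer.addComictoDB(comicid, mismatch, calledfrom='dbupdate')
    #   -> issue_collection(issuedata, nostatus='True')  # parks statuses as
    #      'Skipped' and skips the existence check
    #   -> returns early ('returning to dbupdate module') so dbUpdate can
    #      re-apply each issue's previous status itself

    # normal add path: write info and resolve statuses in one pass
    mylar.importer.addComictoDB(comicid, mismatch)
    #   -> issue_collection(issuedata, nostatus='False')  # Wanted/Skipped/existing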
@@ -12,11 +12,11 @@ from StringIO import StringIO
 import mylar
 from mylar import db, logger, ftpsshup, helpers

-def tehMain():
+def tehMain(forcerss=None):
    logger.info('RSS Feed Check was last run at : ' + str(mylar.RSS_LASTRUN))
    firstrun = "no"
    #check the last run of rss to make sure it's not hammering.
-   if mylar.RSS_LASTRUN is None or mylar.RSS_LASTRUN == '' or mylar.RSS_LASTRUN == '0':
+   if mylar.RSS_LASTRUN is None or mylar.RSS_LASTRUN == '' or mylar.RSS_LASTRUN == '0' or forcerss == True:
        logger.info('RSS Feed Check First Ever Run.')
        firstrun = "yes"
        mins = 0

@@ -426,9 +426,16 @@ def torrentdbsearch(seriesname,issue,comicid=None,nzbprov=None):
    i=0
    #0 holds the title/issue and format-type.
    while (i < len(torsplit)):
+       #we'll rebuild the string here so that it's formatted accordingly to be passed back to the parser.
        logger.fdebug('section(' + str(i) + '): ' + str(torsplit[i]))
+       if i == 0:
+           rebuiltline = str(torsplit[i])
+       else:
+           rebuiltline = rebuiltline + ' (' + str(torsplit[i]) + ')'
        i+=1

+   logger.fdebug('rebuiltline is :' + str(rebuiltline))
+
    seriesname_mod = seriesname
    foundname_mod = torsplit[0]
    seriesname_mod = re.sub("\\band\\b", " ", seriesname_mod.lower())

@@ -439,10 +446,13 @@ def torrentdbsearch(seriesname,issue,comicid=None,nzbprov=None):
    seriesname_mod = re.sub('[\&]', ' ', seriesname_mod)
    foundname_mod = re.sub('[\&]', ' ', foundname_mod)

-   formatrem_seriesname = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '',seriesname_mod)
+   formatrem_seriesname = re.sub('[\'\!\@\#\$\%\:\;\=\?\.]', '',seriesname_mod)
+   formatrem_seriesname = re.sub('[\/]', '-', formatrem_seriesname)
    formatrem_seriesname = re.sub('\s+', ' ', formatrem_seriesname)
    if formatrem_seriesname[:1] == ' ': formatrem_seriesname = formatrem_seriesname[1:]
-   formatrem_torsplit = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '',foundname_mod)
+   formatrem_torsplit = re.sub('[\'\!\@\#\$\%\:\;\\=\?\.]', '',foundname_mod)
+   formatrem_torsplit = re.sub('[\/]', '-', formatrem_torsplit)
    formatrem_torsplit = re.sub('\s+', ' ', formatrem_torsplit)
    logger.fdebug(str(len(formatrem_torsplit)) + ' - formatrem_torsplit : ' + formatrem_torsplit.lower())
    logger.fdebug(str(len(formatrem_seriesname)) + ' - formatrem_seriesname :' + formatrem_seriesname.lower())

@@ -565,6 +575,7 @@ def nzbdbsearch(seriesname,issue,comicid=None,nzbprov=None):
 def torsend2client(seriesname, issue, seriesyear, linkit, site):
    logger.info('matched on ' + str(seriesname))
    filename = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '',seriesname)
    filename = re.sub(' ', '_', filename)
+   filename += "_" + str(issue) + "_" + str(seriesyear)
    if site == 'CBT':
        logger.info(linkit)
@@ -697,7 +697,14 @@ def NZB_SEARCH(ComicName, IssueNumber, ComicYear, SeriesYear, nzbprov, nzbpr, IssDateFix, ...):
                logger.fdebug("Ignoring title as Cover Only detected.")
                cleantitle = "abcdefghijk 0 (1901).cbz"
                continue

+           if ComicVersion:
+               ComVersChk = re.sub("[^0-9]", "", ComicVersion)
+               if ComVersChk == '':
+                   ComVersChk = 0
+           else:
+               ComVersChk = 0
+
            if len(re.findall('[^()]+', cleantitle)) == 1 or 'cover only' in cleantitle.lower():
                #some sites don't have (2013) or whatever..just v2 / v2013. Let's adjust:
                #this handles when there is NO YEAR present in the title, otherwise versioning is way below.

@@ -721,15 +728,17 @@ def NZB_SEARCH(...):
                    logger.fdebug("false version detection..ignoring.")

                if vers4year == "no" and vers4vol == "no":
-                   # if there are no () in the string, try to add them if it looks like a year (19xx or 20xx)
-                   if len(re.findall('[^()]+', cleantitle)):
-                       logger.fdebug("detected invalid nzb filename - attempting to detect year to continue")
-                       cleantitle = re.sub('(.*)\s+(19\d{2}|20\d{2})(.*)', '\\1 (\\2) \\3', cleantitle)
-                       continue
-                   else:
-                       logger.fdebug("invalid nzb and/or cover only - skipping.")
-                       cleantitle = "abcdefghijk 0 (1901).cbz"
-                       continue
+                   # if the series is a v1, let's remove the requirements for year and volume label
+                   if ComVersChk != 0:
+                       # if there are no () in the string, try to add them if it looks like a year (19xx or 20xx)
+                       if len(re.findall('[^()]+', cleantitle)):
+                           logger.fdebug("detected invalid nzb filename - attempting to detect year to continue")
+                           cleantitle = re.sub('(.*)\s+(19\d{2}|20\d{2})(.*)', '\\1 (\\2) \\3', cleantitle)
+                           continue
+                       else:
+                           logger.fdebug("invalid nzb and/or cover only - skipping.")
+                           cleantitle = "abcdefghijk 0 (1901).cbz"
+                           continue

                #adjust for covers only by removing them entirely...
                logger.fdebug("Cleantitle: " + str(cleantitle))

@@ -762,9 +771,13 @@ def NZB_SEARCH(...):
                        comic_andiss = m[cnt]
                        logger.fdebug("Comic: " + str(comic_andiss))
                        logger.fdebug("UseFuzzy is : " + str(UseFuzzy))
+                       logger.fdebug('ComVersChk : ' + str(ComVersChk))
                        if vers4vol != "no" or vers4year != "no":
                            logger.fdebug("Year not given properly formatted but Version detected.Bypassing Year Match.")
                            yearmatch = "true"
+                       elif ComVersChk == 0:
+                           logger.fdebug("Series version detected as V1 (only series in existence with that title). Bypassing Year/Volume check.")
+                           yearmatch = "true"
                        elif UseFuzzy == "0" or UseFuzzy == "2" or UseFuzzy is None or IssDateFix != "no":
                            if m[cnt][:-2] == '19' or m[cnt][:-2] == '20':
                                logger.fdebug('year detected: ' + str(m[cnt]))

@@ -985,12 +998,11 @@ def NZB_SEARCH(...):
                    logger.fdebug("vers4vol: " + str(vers4vol))
                    if vers4year is not "no" or vers4vol is not "no":

-                       if ComicVersion: #is not "None" and ComicVersion is not None:
-                           D_ComicVersion = re.sub("[^0-9]", "", ComicVersion)
-                           if D_ComicVersion == '':
-                               D_ComicVersion = 0
+                       #if the volume is None, assume it's a V1 to increase % hits
+                       if ComVersChk == 0:
+                           D_ComicVersion = 1
                        else:
-                           D_ComicVersion = 0
+                           D_ComicVersion = ComVersChk

                        F_ComicVersion = re.sub("[^0-9]", "", fndcomicversion)
                        S_ComicVersion = str(SeriesYear)
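ComVersChk reduces the series' volume label to its digits, with 0 standing in for "V1 or no volume at all" — and that zero is what unlocks the relaxed year/volume matching in the hunks above. Worked through as a standalone sketch of the same digit-stripping:

    import re

    def comverschk(ComicVersion):
        # same digit-stripping as NZB_SEARCH above
        if ComicVersion:
            chk = re.sub("[^0-9]", "", ComicVersion)
            return 0 if chk == '' else chk
        return 0

    print comverschk('v2')      # -> '2'    volume label present: check enforced
    print comverschk('V2013')   # -> '2013'
    print comverschk(None)      # -> 0      treated as V1: year/volume check bypassed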
@@ -0,0 +1,229 @@
+from bs4 import BeautifulSoup, UnicodeDammit
+import urllib2
+import csv
+import fileinput
+import sys
+import re
+import os
+import sqlite3
+import datetime
+from decimal import Decimal
+from HTMLParser import HTMLParseError
+from time import strptime
+
+import mylar
+from mylar import logger
+
+def solicit(month, year):
+    #convert to numerics just to ensure this...
+    month = int(month)
+    year = int(year)
+
+    #print ( "month: " + str(month) )
+    #print ( "year: " + str(year) )
+
+    # in order to gather ALL upcoming - let's start to loop through months going ahead one at a time
+    # until we get a null then break. (Usually not more than 3 months in advance is available)
+    mnloop = 0
+    upcoming = []
+
+    while (mnloop < 5):
+
+        pagelinks = "http://www.comicbookresources.com/tag/solicits" + str(month) + str(year)
+        pageresponse = urllib2.urlopen ( pagelinks )
+        soup = BeautifulSoup (pageresponse)
+        cntlinks = soup.findAll('h3')
+        lenlinks = len(cntlinks)
+        logger.info( str(lenlinks) + ' results' )
+
+        publish = []
+        resultURL = []
+
+        x = 0
+        cnt = 0
+
+        while (x < lenlinks):
+            headt = cntlinks[x] #iterate through the hrefs pulling out only results.
+            if "/?page=article&id=" in str(headt):
+                #print ("titlet: " + str(headt))
+                headName = headt.findNext(text=True)
+                if ('Marvel' and 'DC' and 'Image' not in headName) and ('Solicitations' in headName):
+                    pubstart = headName.find('Solicitations')
+                    publish.append( headName[:pubstart].strip() )
+                    abc = headt.findAll('a', href=True)[0]
+                    ID_som = abc['href'] #first instance will have the right link...
+                    resultURL.append( ID_som )
+                    #print '[ ' + publish[cnt] + '] Link URL: ' + resultURL[cnt]
+                    cnt+=1
+            x+=1
+
+        #print 'cnt:' + str(cnt)
+
+        if cnt == 0:
+            break  # no results means, end it
+
+        loopthis = (cnt-1)
+        #this loops through each 'found' solicit page
+        shipdate = str(month) + '-' + str(year)
+        while ( loopthis >= 0 ):
+            upcoming += populate(resultURL[loopthis], publish[loopthis], shipdate)
+            loopthis -=1
+
+        month +=1   #increment month by 1
+        mnloop +=1  #increment loop by 1
+
+        if month > 12:  #failsafe failover for months
+            month = 1
+            year+=1
+
+    #print upcoming
+    logger.info( str(len(upcoming)) + ' upcoming issues discovered.' )
+
+    newfl = mylar.CACHE_DIR + "/future-releases.txt"
+    newtxtfile = open(newfl, 'wb')
+
+    cntr = 1
+    for row in upcoming:
+        if row['Extra'] is None or row['Extra'] == '':
+            extrarow = 'N/A'
+        else:
+            extrarow = row['Extra']
+        newtxtfile.write(str(row['Shipdate']) + '\t' + str(row['Publisher']) + '\t' + str(row['Issue']) + '\t' + str(row['Comic']) + '\t' + str(extrarow) + '\tSkipped' + '\t' + str(cntr) + '\n')
+        cntr +=1
+
+    newtxtfile.close()
+
+    logger.fdebug( 'attempting to populate future upcoming...' )
+
+    mylardb = os.path.join(mylar.DATA_DIR, "mylar.db")
+
+    connection = sqlite3.connect(str(mylardb))
+    cursor = connection.cursor()
+
+    cursor.executescript('drop table if exists future;')
+
+    cursor.execute("CREATE TABLE IF NOT EXISTS future (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text, FutureID text, ComicID text);")
+    connection.commit()
+
+    csvfile = open(newfl, "rb")
+    creader = csv.reader(csvfile, delimiter='\t')
+
+    t = 1
+
+    for row in creader:
+        try:
+            #print ("Row: %s" % row)
+            cursor.execute("INSERT INTO future VALUES (?,?,?,?,?,?,?,null);", row)
+        except Exception, e:
+            logger.fdebug("Error - invalid arguments...-skipping")
+            pass
+        t+=1
+    logger.fdebug('successfully added ' + str(t) + ' issues to future upcoming table.')
+    csvfile.close()
+    connection.commit()
+    connection.close()
+
+    mylar.weeklypull.pullitcheck(futurepull="yes")
+    #.end
+
+def populate(link,publisher,shipdate):
+    #this is the secondary url call to populate
+    input = 'http://www.comicbookresources.com/' + link
+    response = urllib2.urlopen ( input )
+    soup = BeautifulSoup (response)
+    abc = soup.findAll('p')
+    lenabc = len(abc)
+    i=0
+    resultName = []
+    resultID = []
+    resultURL = []
+    matched = "no"
+    upcome = []
+
+    while (i < lenabc):
+        titlet = abc[i] #iterate through the p pulling out only results.
+        #print ("titlet: " + str(titlet))
+        if "/news/preview2.php" in str(titlet):
+            tempName = titlet.findNext(text=True)
+            if ' TPB' not in tempName and ' HC' not in tempName and 'GN-TPB' not in tempName and 'subscription variant' not in tempName.lower():
+                #print publisher + ' found upcoming'
+                if '#' in tempName:
+                    tempName = tempName.encode('ascii', 'replace') #.decode('utf-8')
+                    if '???' in tempName:
+                        tempName = tempName.replace('???', ' ')
+                    stissue = tempName.find('#')
+                    endissue = tempName.find(' ', stissue)
+                    if tempName[stissue+1] == ' ': #if issue has space between # and number, adjust.
+                        endissue = tempName.find(' ', stissue+2)
+                    if endissue == -1: endissue = len(tempName)
+                    issue = tempName[stissue:endissue].lstrip(' ')
+                    if ':'in issue: issue = re.sub(':', '', issue).rstrip()
+                    exinfo = tempName[endissue:].lstrip(' ')
+
+                    issue1 = None
+                    issue2 = None
+
+                    if '-' in issue:
+                        #print ('multiple issues detected. Splitting.')
+                        ststart = issue.find('-')
+                        issue1 = issue[:ststart]
+                        issue2 = '#' + str(issue[ststart+1:])
+
+                    if '&' in exinfo:
+                        #print ('multiple issues detected. Splitting.')
+                        ststart = exinfo.find('&')
+                        issue1 = issue  # this detects fine
+                        issue2 = '#' + str(exinfo[ststart+1:])
+                        if '& ' in issue2: issue2 = re.sub("&\\b", "", issue2)
+                        exinfo = exinfo.replace(exinfo[ststart+1:len(issue2)], '').strip()
+                        if exinfo == '&': exinfo = 'N/A'
+
+                    comic = tempName[:stissue].strip()
+                    if 'for \$1' in comic:
+                        exinfo = 'for $1'
+                        comic = comic.replace('for \$1\:', '').lstrip()
+
+                    if issue1:
+                        upcome.append({
+                            'Shipdate': shipdate,
+                            'Publisher': publisher.upper(),
+                            'Issue': re.sub('#', '',issue1).lstrip(),
+                            'Comic': comic.upper(),
+                            'Extra': exinfo.upper()
+                            })
+                        #print ('Comic: ' + comic)
+                        #print('issue#: ' + re.sub('#', '', issue1))
+                        #print ('extra info: ' + exinfo)
+                        if issue2:
+                            upcome.append({
+                                'Shipdate': shipdate,
+                                'Publisher': publisher.upper(),
+                                'Issue': re.sub('#', '', issue2).lstrip(),
+                                'Comic': comic.upper(),
+                                'Extra': exinfo.upper()
+                                })
+                            #print ('Comic: ' + comic)
+                            #print('issue#: ' + re.sub('#', '', issue2))
+                            #print ('extra info: ' + exinfo)
+                    else:
+                        upcome.append({
+                            'Shipdate': shipdate,
+                            'Publisher': publisher.upper(),
+                            'Issue': re.sub('#', '', issue).lstrip(),
+                            'Comic': comic.upper(),
+                            'Extra': exinfo.upper()
+                            })
+                        #print ('Comic: ' + comic)
+                        #print ('issue#: ' + re.sub('#', '', issue))
+                        #print ('extra info: ' + exinfo)
+                else:
+                    print ('no issue # to retrieve.')
+        i+=1
+    return upcome
+    #end.
+
+if __name__ == '__main__':
+    solicit(sys.argv[1], sys.argv[2])
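The __main__ guard makes the scraper runnable on its own, outside the scheduler: month and year come in as command-line arguments and are coerced with int() at the top of solicit(). For example (the module filename solicit.py is an inference — this diff does not show the new file's path — and the invocation assumes an initialized mylar environment for logger and CACHE_DIR):

    # e.g. scrape CBR solicitations starting from May 2013:
    #   python solicit.py 5 2013
    # equivalent in-process call; solicit() int()-coerces its arguments:
    solicit(5, 2013)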
@ -34,7 +34,6 @@ def dbUpdate(ComicIDList=None):
|
|||
else:
|
||||
comiclist = ComicIDList
|
||||
|
||||
|
||||
logger.info('Starting update for %i active comics' % len(comiclist))
|
||||
|
||||
for comic in comiclist:
|
||||
|
@ -42,7 +41,7 @@ def dbUpdate(ComicIDList=None):
|
|||
comicid = comic[0]
|
||||
else:
|
||||
comicid = comic
|
||||
print "comicid" + str(comicid)
|
||||
#print "comicid:" + str(comicid)
|
||||
mismatch = "no"
|
||||
if not mylar.CV_ONLY or comicid[:1] == "G":
|
||||
CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
|
||||
|
@ -67,8 +66,8 @@ def dbUpdate(ComicIDList=None):
|
|||
logger.fdebug('Deleting all issue data.')
|
||||
myDB.select('DELETE FROM issues WHERE ComicID=?', [comicid])
|
||||
myDB.select('DELETE FROM annuals WHERE ComicID=?', [comicid])
|
||||
logger.fdebug('Refreshing the series and pulling in new data using only CV.')
|
||||
mylar.importer.addComictoDB(comicid,mismatch)
|
||||
logger.fdebug('Refreshing the series and pulling in new data using only CV.')
|
||||
mylar.importer.addComictoDB(comicid,mismatch,calledfrom='dbupdate')
|
||||
issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [comicid])
|
||||
annuals = []
|
||||
ann_list = []
|
||||
|
@ -83,7 +82,9 @@ def dbUpdate(ComicIDList=None):
|
|||
for issuenew in issues_new:
|
||||
if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
|
||||
#if the status is now Downloaded, keep status.
|
||||
if issuenew['Status'] == 'Downloaded': break
|
||||
logger.info('existing status: ' + str(issuenew['Status']))
|
||||
logger.info('new status: ' + str(issue['Status']))
|
||||
if issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched': break
|
||||
#change the status to the previous status
|
||||
ctrlVAL = {'IssueID': issue['IssueID']}
|
||||
newVAL = {'Status': issue['Status']}
|
||||
|
@ -98,6 +99,12 @@ def dbUpdate(ComicIDList=None):
|
|||
mylar.CV_ONETIMER = 0
|
||||
else:
|
||||
mylar.importer.addComictoDB(comicid,mismatch)
|
||||
|
||||
#check global skipped2wanted status here
|
||||
#if mylar.GLOBAL_SKIPPED2WANTED:
|
||||
# logger.fdebug('Global change for ' + str(comicid) + ' - Marking all issues not present as Wanted.')
|
||||
# mylar.webserve.skipped2wanted(comicid,True)
|
||||
|
||||
time.sleep(5) #pause for 5 secs so dont hammer CV and get 500 error
|
||||
logger.info('Update complete')
|
||||
|
||||
|
@ -110,7 +117,7 @@ def latest_update(ComicID, LatestIssue, LatestDate):
|
|||
"LatestDate": str(LatestDate)}
|
||||
myDB.upsert("comics", newlatestDict, latestCTRLValueDict)
|
||||
|
||||
def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None):
|
||||
def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, futurepull=None):
|
||||
# here we add to upcoming table...
|
||||
myDB = db.DBConnection()
|
||||
dspComicName = ComicName #to make sure that the word 'annual' will be displayed on screen
|
||||
|
@ -125,7 +132,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
|
|||
"DisplayComicName": dspComicName,
|
||||
"IssueDate": str(IssueDate)}
|
||||
|
||||
#let's refresh the artist here just to make sure if an issue is available/not.
|
||||
#let's refresh the series here just to make sure if an issue is available/not.
|
||||
mismatch = "no"
|
||||
CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [ComicID]).fetchone()
|
||||
if CV_EXcomicid is None: pass
|
||||
|
@ -157,30 +164,37 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
|
|||
issuechk = myDB.action("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [ComicID, IssueNumber]).fetchone()
|
||||
|
||||
if issuechk is None:
|
||||
logger.fdebug(adjComicName + ' Issue: ' + str(IssueNumber) + ' not present in listings to mark for download...updating comic and adding to Upcoming Wanted Releases.')
|
||||
# we need to either decrease the total issue count, OR indicate that an issue is upcoming.
|
||||
upco_results = myDB.action("SELECT COUNT(*) FROM UPCOMING WHERE ComicID=?",[ComicID]).fetchall()
|
||||
upco_iss = upco_results[0][0]
|
||||
#logger.info("upco_iss: " + str(upco_iss))
|
||||
if int(upco_iss) > 0:
|
||||
#logger.info("There is " + str(upco_iss) + " of " + str(ComicName) + " that's not accounted for")
|
||||
newKey = {"ComicID": ComicID}
|
||||
newVal = {"not_updated_db": str(upco_iss)}
|
||||
myDB.upsert("comics", newVal, newKey)
|
||||
elif int(upco_iss) <=0 and lastupdatechk['not_updated_db']:
|
||||
#if not_updated_db has a value, and upco_iss is > 0, let's zero it back out cause it's updated now.
|
||||
newKey = {"ComicID": ComicID}
|
||||
newVal = {"not_updated_db": ""}
|
||||
myDB.upsert("comics", newVal, newKey)
|
||||
if futurepull is None:
|
||||
logger.fdebug(adjComicName + ' Issue: ' + str(IssueNumber) + ' not present in listings to mark for download...updating comic and adding to Upcoming Wanted Releases.')
|
||||
# we need to either decrease the total issue count, OR indicate that an issue is upcoming.
|
||||
upco_results = myDB.action("SELECT COUNT(*) FROM UPCOMING WHERE ComicID=?",[ComicID]).fetchall()
|
||||
upco_iss = upco_results[0][0]
|
||||
#logger.info("upco_iss: " + str(upco_iss))
|
||||
if int(upco_iss) > 0:
|
||||
#logger.info("There is " + str(upco_iss) + " of " + str(ComicName) + " that's not accounted for")
|
||||
newKey = {"ComicID": ComicID}
|
||||
newVal = {"not_updated_db": str(upco_iss)}
|
||||
myDB.upsert("comics", newVal, newKey)
|
||||
elif int(upco_iss) <=0 and lastupdatechk['not_updated_db']:
|
||||
#if not_updated_db has a value, and upco_iss is > 0, let's zero it back out cause it's updated now.
|
||||
newKey = {"ComicID": ComicID}
|
||||
newVal = {"not_updated_db": ""}
|
||||
myDB.upsert("comics", newVal, newKey)
|
||||
|
||||
if hours > 5 or forcecheck == 'yes':
|
||||
pullupd = "yes"
|
||||
logger.fdebug('Now Refreshing comic ' + ComicName + ' to make sure it is up-to-date')
|
||||
if ComicID[:1] == "G": mylar.importer.GCDimport(ComicID,pullupd)
|
||||
else: mylar.importer.addComictoDB(ComicID,mismatch,pullupd)
|
||||
if hours > 5 or forcecheck == 'yes':
|
||||
pullupd = "yes"
|
||||
logger.fdebug('Now Refreshing comic ' + ComicName + ' to make sure it is up-to-date')
|
||||
if ComicID[:1] == "G": mylar.importer.GCDimport(ComicID,pullupd)
|
||||
else: mylar.importer.addComictoDB(ComicID,mismatch,pullupd)
|
||||
else:
|
||||
logger.fdebug('It has not been longer than 5 hours since we last did this...we will wait so we do not hammer things.')
|
||||
return
|
||||
else:
|
||||
logger.fdebug('It has not been longer than 5 hours since we last did this...we will wait so we do not hammer things.')
|
||||
return
|
||||
# if futurepull is not None, let's just update the status and ComicID
|
||||
nKey = {"ComicID": ComicID}
|
||||
nVal = {"Status": "Wanted"}
|
||||
myDB.upsert("future", nVal, nKey)
|
||||
|
||||
elif issuechk['Issue_Number'] == IssueNumber:
|
||||
logger.fdebug('Comic series already up-to-date ... no need to refresh at this time.')
|
||||
logger.fdebug('Available to be marked for download - checking...' + adjComicName + ' Issue: ' + str(issuechk['Issue_Number']))
|
||||
|
@ -256,12 +270,15 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None)
|
|||
return downstats
|
||||
|
||||
|
||||
def weekly_update(ComicName,IssueNumber,CStatus,CID):
|
||||
def weekly_update(ComicName,IssueNumber,CStatus,CID,futurepull=None):
|
||||
# here we update status of weekly table...
|
||||
# added Issue to stop false hits on series' that have multiple releases in a week
|
||||
# added CStatus to update status flags on Pullist screen
|
||||
myDB = db.DBConnection()
|
||||
issuecheck = myDB.action("SELECT * FROM weekly WHERE COMIC=? AND ISSUE=?", [ComicName,IssueNumber]).fetchone()
|
||||
if futurepull is None:
|
||||
issuecheck = myDB.action("SELECT * FROM weekly WHERE COMIC=? AND ISSUE=?", [ComicName,IssueNumber]).fetchone()
|
||||
else:
|
||||
issuecheck = myDB.action("SELECT * FROM future WHERE COMIC=? AND ISSUE=?", [ComicName,IssueNumber]).fetchone()
|
||||
if issuecheck is not None:
|
||||
controlValue = { "COMIC": str(ComicName),
|
||||
"ISSUE": str(IssueNumber)}
|
||||
|
@@ -274,7 +291,14 @@ def weekly_update(ComicName,IssueNumber,CStatus,CID):
    else:
        newValue = {"STATUS": "Skipped"}

-   myDB.upsert("weekly", newValue, controlValue)
+   if futurepull is None:
+       myDB.upsert("weekly", newValue, controlValue)
+   else:
+       if issuecheck['ComicID'] is not None:
+           newValue = {"STATUS": "Wanted",
+                       "ComicID": issuecheck['ComicID']}
+       myDB.upsert("future", newValue, controlValue)

def newpullcheck(ComicName, ComicID):
    # When adding a new comic, let's check for new issues on this week's pullist and update.
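
weekly_update now writes the same status payload to one of two tables, and a future-list hit that maps to a watched series is promoted to Wanted with its ComicID attached. That rule, restated as a small self-contained check (function and values are illustrative only):

    def future_status(new_value, comic_id):
        # a future-list row backed by a watched ComicID gets marked Wanted
        if comic_id is not None:
            return {'STATUS': 'Wanted', 'ComicID': comic_id}
        return new_value

    assert future_status({'STATUS': 'Skipped'}, None) == {'STATUS': 'Skipped'}
    assert future_status({'STATUS': 'Skipped'}, '4050-1234') == {'STATUS': 'Wanted', 'ComicID': '4050-1234'}
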
@@ -461,7 +485,12 @@ def forceRescan(ComicID,archive=None):
        try:
            tmpfc = fc['comiclist'][fn]
        except IndexError:
-           break
+           logger.fdebug('Unable to properly retrieve a file listing for the given series.')
+           logger.fdebug('Probably because the filenames being scanned are not in a parseable format')
+           if fn == 0:
+               return
+           else:
+               break
        temploc= tmpfc['JusttheDigits'].replace('_', ' ')

        # temploc = tmpfc['ComicFilename'].replace('_', ' ')
@@ -711,7 +740,7 @@ def forceRescan(ComicID,archive=None):
        else:
            for chk in chkthis:
                old_status = chk['Status']
-               #logger.fdebug('old_status:' + str(old_status))
+               logger.fdebug('old_status:' + str(old_status))
                if old_status == "Skipped":
                    if mylar.AUTOWANT_ALL:
                        issStatus = "Wanted"
@@ -730,7 +759,7 @@ def forceRescan(ComicID,archive=None):
                else:
                    issStatus = "Skipped"

-               #logger.fdebug("new status: " + str(issStatus))
+               logger.fdebug("new status: " + str(issStatus))

                update_iss.append({"IssueID": chk['IssueID'],
                                   "Status": issStatus})

@@ -62,7 +62,11 @@ class WebInterface(object):
    def home(self):
        myDB = db.DBConnection()
        comics = myDB.select('SELECT * from comics order by ComicSortName COLLATE NOCASE')
-       return serve_template(templatename="index.html", title="Home", comics=comics)
+       if mylar.ANNUALS_ON:
+           annuals_on = True
+       else:
+           annuals_on = False
+       return serve_template(templatename="index.html", title="Home", comics=comics, annuals_on=annuals_on)
    home.exposed = True

    def comicDetails(self, ComicID):
@@ -447,7 +451,7 @@ class WebInterface(object):
        myDB.select('DELETE FROM issues WHERE ComicID=?', [ComicID])
        myDB.select('DELETE FROM annuals WHERE ComicID=?', [ComicID])
        logger.fdebug("Refreshing the series and pulling in new data using only CV.")
-       mylar.importer.addComictoDB(ComicID,mismatch)
+       mylar.importer.addComictoDB(ComicID,mismatch,calledfrom='dbupdate')
        issues_new = myDB.select('SELECT * FROM issues WHERE ComicID=?', [ComicID])
        annuals = []
        ann_list = []
@@ -461,8 +465,9 @@ class WebInterface(object):
        for issue in issues:
            for issuenew in issues_new:
                if issuenew['IssueID'] == issue['IssueID'] and issuenew['Status'] != issue['Status']:
-                   #if the status is now Downloaded, keep status.
-                   if issuenew['Status'] == 'Downloaded': break
+                   #if the status is now Downloaded/Snatched, keep status.
+                   if issuenew['Status'] == 'Downloaded' or issue['Status'] == 'Snatched':
+                       break
                    #change the status to the previous status
                    ctrlVAL = {'IssueID': issue['IssueID']}
                    newVAL = {'Status': issue['Status']}
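
The refresh path above re-adds every issue and then copies each previous status back, except where doing so would clobber real progress. The guard condition, restated as a tiny predicate (names invented for illustration):

    def skip_restore(old_status, new_status):
        # mirrors the 'break' above: a freshly Downloaded issue, or one that was
        # Snatched before the refresh, keeps the status the refresh produced
        return new_status == 'Downloaded' or old_status == 'Snatched'

    assert skip_restore('Wanted', 'Downloaded')
    assert skip_restore('Snatched', 'Skipped')
    assert not skip_restore('Wanted', 'Skipped')
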
@@ -488,6 +493,15 @@ class WebInterface(object):
        #raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" & ComicID)
    editIssue.exposed=True

+   def force_rss(self):
+       logger.info('attempting to run RSS Check Forcibly')
+       chktorrent = mylar.rsscheck.tehMain(forcerss=True)
+       if chktorrent:
+           logger.info('Successfully ran RSS Force Check.')
+       return
+
+   force_rss.exposed = True
+
    #def chkTorrents(self, ComicName, pickfeed):
    #    chktorrent = rsscheck.torrents(ComicName,pickfeed)
    #    if chktorrent:
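
The new endpoint is wired the same way as every other handler in this file: old-style CherryPy exposure, where setting .exposed = True makes the method reachable as /force_rss, which is the URL the new Force RSS button's doAjaxCall hits. A minimal self-contained sketch, with the class name and port invented:

    import cherrypy

    class Root(object):
        def force_rss(self):
            # reached as GET /force_rss, exactly like the handler above
            return 'RSS Force now running'
        force_rss.exposed = True

    if __name__ == '__main__':
        cherrypy.quickstart(Root(), '/', {'global': {'server.socket_port': 8090}})
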
@@ -647,22 +661,47 @@ class WebInterface(object):
        raise cherrypy.HTTPRedirect(redirect)
    queueissue.exposed = True

-   def unqueueissue(self, IssueID, ComicID):
+   def unqueueissue(self, IssueID, ComicID, ComicName=None, Issue=None, FutureID=None):
        myDB = db.DBConnection()
-       issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
-       annchk = 'no'
-       if issue is None:
-           if mylar.ANNUALS_ON:
-               issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
-               annchk = 'yes'
-       logger.info(u"Marking " + issue['ComicName'] + " issue # " + issue['Issue_Number'] + " as skipped...")
-       controlValueDict = {'IssueID': IssueID}
-       newValueDict = {'Status': 'Skipped'}
-       if annchk == 'yes':
-           myDB.upsert("annuals", newValueDict, controlValueDict)
-       else:
-           myDB.upsert("issues", newValueDict, controlValueDict)
-       raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
+       if ComicName is None:
+           issue = myDB.action('SELECT * FROM issues WHERE IssueID=?', [IssueID]).fetchone()
+           annchk = 'no'
+           if issue is None:
+               if mylar.ANNUALS_ON:
+                   issue = myDB.action('SELECT * FROM annuals WHERE IssueID=?', [IssueID]).fetchone()
+                   annchk = 'yes'
+           logger.info(u"Marking " + issue['ComicName'] + " issue # " + issue['Issue_Number'] + " as skipped...")
+           controlValueDict = {"IssueID": IssueID}
+           newValueDict = {"Status": "Skipped"}
+           if annchk == 'yes':
+               myDB.upsert("annuals", newValueDict, controlValueDict)
+           else:
+               myDB.upsert("issues", newValueDict, controlValueDict)
+           raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID)
+       else:
+           #if ComicName is not None, then it's from the FuturePull list that we're 'unwanting' an issue.
+           #ComicID may be present if it's a watch from the Watchlist, otherwise it won't exist.
+           if ComicID is not None and ComicID != 'None':
+               logger.info('comicid present:' + str(ComicID))
+               thefuture = myDB.action('SELECT * FROM future WHERE ComicID=?', [ComicID]).fetchone()
+           else:
+               logger.info('FutureID: ' + str(FutureID))
+               logger.info('no comicid - ComicName: ' + str(ComicName) + ' -- Issue: #' + str(Issue))
+               thefuture = myDB.action('SELECT * FROM future WHERE FutureID=?', [FutureID]).fetchone()
+           if thefuture is None:
+               logger.info('Cannot find the corresponding issue in the Futures List for some reason. This is probably an Error.')
+           else:
+               logger.info('Marking ' + thefuture['COMIC'] + ' issue # ' + thefuture['ISSUE'] + ' as skipped...')
+               if ComicID is not None and ComicID != 'None':
+                   cVDict = {"ComicID": thefuture['ComicID']}
+               else:
+                   cVDict = {"FutureID": thefuture['FutureID']}
+               nVDict = {"Status": "Skipped"}
+               logger.info('cVDict:' + str(cVDict))
+               logger.info('nVDict:' + str(nVDict))
+               myDB.upsert("future", nVDict, cVDict)

    unqueueissue.exposed = True

    def archiveissue(self, IssueID):
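
One subtlety above: when the request comes from the Future pull page, ComicID arrives through the query string and can literally be the string 'None', hence the double test. A tiny hypothetical helper showing the key-selection rule (names invented):

    def future_control_key(comic_id, future_id):
        # watchlisted rows carry a real ComicID; unwatched ones only a FutureID
        if comic_id is not None and comic_id != 'None':
            return {'ComicID': comic_id}
        return {'FutureID': future_id}

    assert future_control_key('52310', None) == {'ComicID': '52310'}
    assert future_control_key('None', '88') == {'FutureID': '88'}
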
@@ -695,7 +734,7 @@ class WebInterface(object):
            try:
                x = float(weekly['ISSUE'])
            except ValueError, e:
-               if 'au' in weekly['ISSUE'].lower() or 'ai' in weekly['ISSUE'].lower():
+               if 'au' in weekly['ISSUE'].lower() or 'ai' in weekly['ISSUE'].lower() or '.inh' in weekly['ISSUE'].lower() or '.now' in weekly['ISSUE'].lower():
                    x = weekly['ISSUE']

            if x is not None:
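
This widened except-branch is the pull-list display filter: an issue "number" is kept if it parses as a float or carries one of the known suffixes. The same rule reappears in the future list below; restated as a standalone predicate (the function name is invented):

    def displayable_issue(raw):
        # plain numeric issue numbers parse as floats; the known suffixed
        # variants (AU, AI, .INH, .NOW) are whitelisted explicitly
        try:
            float(raw)
            return True
        except ValueError:
            low = raw.lower()
            return 'au' in low or 'ai' in low or '.inh' in low or '.now' in low

    print(displayable_issue('665'))     # True
    print(displayable_issue('665AU'))   # True
    print(displayable_issue('TP'))      # False
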
@@ -716,6 +755,82 @@ class WebInterface(object):
        return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pulldate=pulldate['SHIPDATE'], pullfilter=True, weekfold=weekfold)
    pullist.exposed = True

+   def futurepull(self):
+       from mylar import solicit
+       #get month-year here, and self-populate in future
+       now = datetime.datetime.now()
+       if len(str(now.month)) != 2:
+           month = '0' + str(now.month)
+       else:
+           month = str(now.month)
+       year = str(now.year)
+       logger.fdebug('month = ' + str(month))
+       logger.fdebug('year = ' + str(year))
+       threading.Thread(target=solicit.solicit, args=(month, year)).start()
+       raise cherrypy.HTTPRedirect("home")
+   futurepull.exposed = True
+
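
One detail in futurepull worth spelling out: threading.Thread must receive the callable and its arguments separately. Writing target=solicit.solicit(month, year) would run the solicitation synchronously and hand the thread only its return value. A minimal illustration (the solicit stub here is hypothetical):

    import threading

    def solicit(month, year):   # stand-in for mylar.solicit.solicit
        print('soliciting %s-%s' % (year, month))

    # the worker thread itself invokes the callable:
    t = threading.Thread(target=solicit, args=('03', '2014'))
    t.start()
    t.join()
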
+   def futurepulllist(self):
+       myDB = db.DBConnection()
+       futureresults = []
+       popit = myDB.select("SELECT * FROM sqlite_master WHERE name='future' and type='table'")
+       if popit:
+           f_results = myDB.select("SELECT SHIPDATE, PUBLISHER, ISSUE, COMIC, EXTRA, STATUS, ComicID, FutureID from future")
+           for future in f_results:
+               x = None
+               if future['ISSUE'] is None: break
+               try:
+                   x = float(future['ISSUE'])
+               except ValueError, e:
+                   if 'au' in future['ISSUE'].lower() or 'ai' in future['ISSUE'].lower() or '.inh' in future['ISSUE'].lower() or '.now' in future['ISSUE'].lower():
+                       x = future['ISSUE']
+
+               if future['EXTRA'] == 'N/A' or future['EXTRA'] == '':
+                   future_extra = ''
+               else:
+                   future_extra = future['EXTRA']
+                   if '(of' in future['EXTRA'].lower():
+                       future_extra = re.sub('[\(\)]', '', future['EXTRA'])
+
+               if x is not None:
+                   futureresults.append({
+                       "SHIPDATE": future['SHIPDATE'],
+                       "PUBLISHER": future['PUBLISHER'],
+                       "ISSUE": future['ISSUE'],
+                       "COMIC": future['COMIC'],
+                       "EXTRA": future_extra,
+                       "STATUS": future['STATUS'],
+                       "COMICID": future['ComicID'],
+                       "FUTUREID": future['FutureID']
+                   })
+           futureresults = sorted(futureresults, key=itemgetter('SHIPDATE','PUBLISHER','COMIC'), reverse=False)
+       else:
+           logger.error('No results to post for upcoming issues...something is probably wrong')
+           return
+       return serve_template(templatename="futurepull.html", title="future Pull", futureresults=futureresults, pullfilter=True)
+
+   futurepulllist.exposed = True
+
+   def add2futurewatchlist(self, ComicName, Issue, Publisher, ShipDate, FutureID):
+       logger.info('Adding ' + ComicName + ' # ' + str(Issue) + ' to future upcoming watchlist')
+       myDB = db.DBConnection()
+       chkfuture = myDB.action('SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber=?', [ComicName, Issue]).fetchone()
+       if chkfuture is not None:
+           logger.info('Already on Future Upcoming list - not adding at this time.')
+           return
+       newCtrl = {"ComicName": ComicName,
+                  "IssueNumber": Issue,
+                  "Publisher": Publisher}
+       newVal = {"Status": "Wanted",
+                 "IssueDate": ShipDate}
+       myDB.upsert("futureupcoming", newVal, newCtrl)
+
+       fCtrl = {"FutureID": FutureID}
+       fVal = {"Status": "Wanted"}
+       myDB.upsert("future", fVal, fCtrl)
+
+   add2futurewatchlist.exposed = True
+
    def filterpull(self):
        myDB = db.DBConnection()
        weeklyresults = myDB.select("SELECT * from weekly")
@@ -781,7 +896,7 @@ class WebInterface(object):
        return serve_template(templatename="upcoming.html", title="Upcoming", upcoming=upcoming, issues=issues, ann_list=ann_list)
    upcoming.exposed = True

-   def skipped2wanted(self, comicid):
+   def skipped2wanted(self, comicid, fromupdate=None):
        # change all issues for a given ComicID that are Skipped, into Wanted.
        issuestowanted = []
        issuesnumwant = []
@@ -794,9 +909,14 @@ class WebInterface(object):
            myDB.upsert("issues", mvvalues, mvcontroldict)
            issuestowanted.append(skippy['IssueID'])
            issuesnumwant.append(skippy['Issue_Number'])
-       if len(issuestowanted) > 0 :
-           logger.info("Marking issues: %s as Wanted" % issuesnumwant)
-           threading.Thread(target=search.searchIssueIDList, args=[issuestowanted]).start()
+       if len(issuestowanted) > 0:
+           if fromupdate is None:
+               logger.info("Marking issues: %s as Wanted" % issuesnumwant)
+               threading.Thread(target=search.searchIssueIDList, args=[issuestowanted]).start()
+           else:
+               logger.info('Marking issues: %s as Wanted' % issuesnumwant)
+               logger.info('These will be searched for on next Search Scan / Force Check')
+               return
        raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % [comicid])
    skipped2wanted.exposed = True

@@ -921,7 +1041,7 @@ class WebInterface(object):
        else:
            comicsToAdd.append(ComicID)
        if len(comicsToAdd) > 0:
-           logger.debug("Refreshing comics: %s" % comicsToAdd)
+           logger.fdebug("Refreshing comics: %s" % comicsToAdd)
            #threading.Thread(target=importer.addComicIDListToDB, args=[comicsToAdd]).start()
            threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start()
        raise cherrypy.HTTPRedirect("home")
@@ -1101,11 +1221,22 @@ class WebInterface(object):
        arc_match = []
        wantedlist = []

+       sarc_title = None
+       showonreadlist = 1 # 0 won't show storyarcissues on readinglist main page, 1 will show
+
        for arc in ArcWatch:
            logger.fdebug("arc: " + arc['storyarc'] + " : " + arc['ComicName'] + " : " + arc['IssueNumber'])
            #cycle through the story arcs here for matches on the watchlist

+           if sarc_title != arc['storyarc']:
+               dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arc['storyarc'])
+               if os.path.isdir(dstloc):
+                   logger.info('Validating Directory (' + dstloc + '). Already exists! Continuing...')
+               else:
+                   logger.fdebug('Updated Directory does not exist! - attempting to create now.')
+                   filechecker.validateAndCreateDirectory(dstloc, True)
+

            mod_arc = re.sub('[\:/,\'\/\-\&\%\$\#\@\!\*\+\.]', '', arc['ComicName'])
            mod_arc = re.sub('\\bthe\\b', '', mod_arc.lower())
            mod_arc = re.sub('\\band\\b', '', mod_arc.lower())
@@ -1165,6 +1296,7 @@ class WebInterface(object):
            dstloc = os.path.join(mylar.DESTINATION_DIR, 'StoryArcs', arc['storyarc'])
+           logger.fdebug('destination location set to : ' + dstloc)

            filechk = filechecker.listFiles(dstloc, arc['ComicName'], sarc='true')
            fn = 0
            fccnt = filechk['comiccount']
@@ -1192,7 +1324,9 @@ class WebInterface(object):
                ctrlVal = {"IssueArcID": arc['IssueArcID'] }
                myDB.upsert("readinglist",newVal,ctrlVal)
                fn+=1


+           sarc_title = arc['storyarc']

        logger.fdebug("we matched on " + str(len(arc_match)) + " issues")

        for m_arc in arc_match:
@@ -1251,7 +1385,7 @@ class WebInterface(object):
                            "IssueID": issue['IssueID']}
                myDB.upsert("readinglist",newVal,ctrlVal)
                logger.info("Marked " + issue['ComicName'] + " :# " + str(issue['Issue_Number']) + " as Wanted.")



    ArcWatchlist.exposed = True

@@ -1528,7 +1662,7 @@ class WebInterface(object):
        return serve_template(templatename="searchresults.html", title='Import Results for: "' + comicname + '"',searchresults=sresults, type=type, imported='confirm', ogcname=comicid)
    confirmResult.exposed = True

-   def comicScan(self, path, scan=0, redirect=None, autoadd=0, libraryscan=0, imp_move=0, imp_rename=0, imp_metadata=0):
+   def comicScan(self, path, scan=0, libraryscan=0, redirect=None, autoadd=0, imp_move=0, imp_rename=0, imp_metadata=0):
        mylar.LIBRARYSCAN = libraryscan
        mylar.ADD_COMICS = autoadd
        mylar.COMIC_DIR = path
@@ -1585,7 +1719,7 @@ class WebInterface(object):


        #self.importResults()
-       raise cherrypy.HTTPRedirect("importResults")
+       raise cherrypy.HTTPRedirect("importResults")
        if redirect:
            raise cherrypy.HTTPRedirect(redirect)
        else:
@@ -2236,6 +2370,52 @@ class WebInterface(object):

    configUpdate.exposed = True

+   def SABtest(self):
+       logger.info('testing SABnzbd connection')
+       if mylar.USE_SABNZBD:
+           import urllib2
+           from xml.dom.minidom import parseString
+
+           #if user/pass given, we can auto-fill the API ;)
+           if mylar.SAB_USERNAME is None or mylar.SAB_PASSWORD is None:
+               logger.info('No Username / Password provided for SABnzbd credentials. Unable to auto-grab API key')
+
+           logger.info('testing connection to SABnzbd @ ' + mylar.SAB_HOST)
+           logger.info('SAB API Key (FULL API KEY):' + mylar.SAB_APIKEY)
+           if mylar.SAB_HOST.endswith('/'):
+               sabhost = mylar.SAB_HOST
+           else:
+               sabhost = mylar.SAB_HOST + '/'
+           querysab = sabhost + "api?mode=get_config&section=misc&output=xml&apikey=" + mylar.SAB_APIKEY
+           file = urllib2.urlopen(querysab)
+           data = file.read()
+           file.close()
+           dom = parseString(data)
+
+           try:
+               q_sabhost = dom.getElementsByTagName('host')[0].firstChild.wholeText
+               q_nzbkey = dom.getElementsByTagName('nzb_key')[0].firstChild.wholeText
+               q_apikey = dom.getElementsByTagName('api_key')[0].firstChild.wholeText
+           except:
+               errorm = dom.getElementsByTagName('error')[0].firstChild.wholeText
+               logger.error(u"Error detected attempting to retrieve SAB data : " + errorm)
+               return
+
+           #test which apikey provided
+           if q_nzbkey != mylar.SAB_APIKEY:
+               if q_apikey != mylar.SAB_APIKEY:
+                   logger.info('API KEY provided does not match with SABnzbd')
+               else:
+                   logger.info('API KEY provided is FULL API KEY')
+           else:
+               logger.info('API KEY provided is NZB API KEY')
+
+           logger.info('Connection to SABnzbd tested successfully')
+       else:
+           logger.info('You do not have anything stated for SAB Host. Please correct and try again.')
+           return
+   SABtest.exposed = True
+
    def shutdown(self):
        mylar.SIGNAL = 'shutdown'
        message = 'Shutting Down...'
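
SABtest leans on SABnzbd's api?mode=get_config call, which answers only to the full API key; an NZB-only key gets an error document back, and that asymmetry is how the two key types are told apart above. A trimmed sketch of the same probe, using Python 2 urllib2 as in the code above (host and key values are placeholders):

    import urllib2
    from xml.dom.minidom import parseString

    def sab_keys(sabhost, apikey):
        # sabhost must end in '/'; a NZB-only key makes SAB return an <error> doc
        url = sabhost + 'api?mode=get_config&section=misc&output=xml&apikey=' + apikey
        dom = parseString(urllib2.urlopen(url).read())
        text = lambda tag: dom.getElementsByTagName(tag)[0].firstChild.wholeText
        return text('nzb_key'), text('api_key')

    # nzb, full = sab_keys('http://localhost:8080/', 'abcdef0123456789')
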
@@ -346,8 +346,11 @@ def pullit(forcecheck=None):
    os.remove( str(pullpath) + "newreleases.txt" )
    pullitcheck(forcecheck=forcecheck)

-def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
-   logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
+def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None, futurepull=None):
+   if futurepull is None:
+       logger.info(u"Checking the Weekly Releases list for comics I'm watching...")
+   else:
+       logger.info('Checking the Future Releases list for upcoming comics I am watching for...')
    myDB = db.DBConnection()

    not_t = ['TP',
@@ -461,7 +464,10 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
        sqlsearch = re.sub(r'\s', '%', sqlsearch)
        sqlsearch = sqlsearch + '%'
        logger.fdebug("searchsql: " + sqlsearch)
-       weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
+       if futurepull is None:
+           weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [sqlsearch])
+       else:
+           weekly = myDB.select('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM future WHERE COMIC LIKE (?)', [sqlsearch])
        #cur.execute('SELECT PUBLISHER, ISSUE, COMIC, EXTRA, SHIPDATE FROM weekly WHERE COMIC LIKE (?)', [lines[cnt]])
        for week in weekly:
            if week == None:
@@ -552,10 +558,16 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
                ComicDate = str(week['SHIPDATE'])
                #ComicName = str(unlines[cnt])
                logger.fdebug("Watchlist hit for : " + ComicName + " ISSUE: " + str(watchfndiss[tot -1]))
-               # here we add to comics.latest
-               updater.latest_update(ComicID=ComicID, LatestIssue=ComicIssue, LatestDate=ComicDate)
-               # here we add to upcoming table...
-               statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)

+               if futurepull is None:
+                   # here we add to comics.latest
+                   updater.latest_update(ComicID=ComicID, LatestIssue=ComicIssue, LatestDate=ComicDate)
+                   # here we add to upcoming table...
+                   statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck)
+               else:
+                   # here we add to upcoming table...
+                   statusupdate = updater.upcoming_update(ComicID=ComicID, ComicName=ComicName, IssueNumber=ComicIssue, IssueDate=ComicDate, forcecheck=forcecheck, futurepull='yes')

                # here we update status of weekly table...
                if statusupdate is not None:
                    cstatus = statusupdate['Status']
@@ -563,7 +575,13 @@ def pullitcheck(comic1off_name=None,comic1off_id=None,forcecheck=None):
                else:
                    cstatus = None
                    cstatusid = None
-               updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=cstatus, CID=cstatusid)
+               #set the variable fp to denote updating the futurepull list ONLY
+               if futurepull is None:
+                   fp = None
+               else:
+                   cstatusid = ComicID
+                   fp = "yes"
+               updater.weekly_update(ComicName=week['COMIC'], IssueNumber=ComicIssue, CStatus=cstatus, CID=cstatusid, futurepull=fp)
                break
            break
        break
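
Taken together, the futurepull flag threads from pullitcheck through upcoming_update and weekly_update, swapping the weekly table for the future table and skipping the comics.latest bookkeeping. A condensed, purely illustrative summary of that routing (names invented):

    def routing(futurepull=None):
        # mirrors the branches added above: which table gets searched and
        # status-updated, and whether comics.latest is touched at all
        if futurepull is None:
            return {'search': 'weekly', 'status': 'weekly', 'touch_latest': True}
        return {'search': 'future', 'status': 'future', 'touch_latest': False}

    print(routing())        # weekly pull behaviour
    print(routing('yes'))   # future pull behaviour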