FIX: fix for multiple threads being started when saving/starting, which would lead to locking and general problems

evilhero 2019-03-02 14:10:33 -05:00
parent 7e9515d676
commit 6a9edb44e7
5 changed files with 271 additions and 178 deletions
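
The root of the bug: start(), Config.configure(), and halt() each spawned worker threads directly against the same module-level queues, so a settings save could start a second AUTO-SNATCHER or DDL-QUEUE thread alongside a live one, and the duplicate workers would then fight over locks. The commit centralizes thread control in a new queue_schedule() that checks the existing thread before starting another. A minimal sketch of that pattern, with illustrative names (Python 3 spellings queue/is_alive are used below; this Python 2 codebase uses Queue and isAlive):

import queue
import threading

POOL = None                 # module-level thread handle, as with mylar.SNPOOL, mylar.DDLPOOL, etc.
WORK_QUEUE = queue.Queue()  # stand-in for SNATCHED_QUEUE, DDL_QUEUE, ...

def worker(q):
    while True:
        item = q.get(True)
        if item == 'exit':  # the same shutdown sentinel used throughout this diff
            break

def queue_schedule(mode):
    global POOL
    if mode == 'start':
        if POOL is not None and POOL.is_alive():
            return          # the fix: never start a second thread on a queue already being worked
        POOL = threading.Thread(target=worker, args=(WORK_QUEUE,), name='WORKER')
        POOL.start()
    else:
        if POOL is None or not POOL.is_alive():
            return
        WORK_QUEUE.put('exit')  # wake the blocking get() with the sentinel
        POOL.join(5)            # bounded join so a stuck worker cannot hang shutdown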

mylar/PostProcessor.py

@@ -880,32 +880,6 @@ class PostProcessor(object):
elif self.matched is True:
logger.warn('%s[MATCH: %s - %s] We matched by name for this series, but cannot find a corresponding issue number in the series list.' % (module, cs['ComicName'], cs['ComicID']))
#mlp = []
#xmld = filechecker.FileChecker()
#if len(manual_list) > 1:
# #in case the manual pp matches on more than one series in the watchlist, drop back down to exact name matching to see if we can narrow
# #the matches down further to the point where there's only one exact match. Not being able to match specifically when there is more than
# #one item in the manual list that's matched to the same file will result in a dupe_src error and/or mistakenly PP'ing against the
# #wrong series.
# for x in manual_list:
# xmld1 = xmld.dynamic_replace(helpers.conversion(x['ComicName']))
# xseries = xmld1['mod_seriesname'].lower()
# xmld2 = xmld.dynamic_replace(helpers.conversion(x['Series']))
# xfile = xmld2['mod_seriesname'].lower()
# #logger.info('[xseries:%s][xfile:%s]' % (xseries,xfile))
# if re.sub('\|', '', xseries).strip() == re.sub('\|', '', xfile).strip():
# logger.fdebug('%s[DEFINITIVE-NAME MATCH] Definitive name match exactly to : %s [%s]' % (module, x['ComicName'], x['ComicID']))
# mlp.append(x)
# else:
# pass
# if len(mlp) == 1:
# manual_list = mlp
# logger.fdebug('%s[CONFIRMED-FORCE-OVERRIDE] Over-ride of matching taken due to exact name matching of series' % module)
# else:
# logger.warn('%s[CONFIRMATION-PROBLEM] Unable to determine proper match for series as more than one successful match came up.' % module)
#we should setup for manual post-processing of story-arc issues here
#we can also search by ComicID to just grab those particular arcs as an alternative as well (not done)

mylar/__init__.py

@@ -32,7 +32,6 @@ import Queue
import platform
import locale
import re
from threading import Lock, Thread
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
@@ -168,6 +167,7 @@ def initialize(config_file):
global CONFIG, _INITIALIZED, QUIET, CONFIG_FILE, OS_DETECT, MAINTENANCE, CURRENT_VERSION, LATEST_VERSION, COMMITS_BEHIND, INSTALL_TYPE, IMPORTLOCK, PULLBYFILE, INKDROPS_32P, \
DONATEBUTTON, CURRENT_WEEKNUMBER, CURRENT_YEAR, UMASK, USER_AGENT, SNATCHED_QUEUE, NZB_QUEUE, PP_QUEUE, SEARCH_QUEUE, DDL_QUEUE, PULLNEW, COMICSORT, WANTED_TAB_OFF, CV_HEADERS, \
IMPORTBUTTON, IMPORT_FILES, IMPORT_TOTALFILES, IMPORT_CID_COUNT, IMPORT_PARSED_COUNT, IMPORT_FAILURE_COUNT, CHECKENABLED, CVURL, DEMURL, WWTURL, WWT_CF_COOKIEVALUE, \
DDLPOOL, NZBPOOL, SNPOOL, PPPOOL, SEARCHPOOL, \
USE_SABNZBD, USE_NZBGET, USE_BLACKHOLE, USE_RTORRENT, USE_UTORRENT, USE_QBITTORRENT, USE_DELUGE, USE_TRANSMISSION, USE_WATCHDIR, SAB_PARAMS, \
PROG_DIR, DATA_DIR, CMTAGGER_PATH, DOWNLOAD_APIKEY, LOCAL_IP, STATIC_COMICRN_VERSION, STATIC_APC_VERSION, KEYS_32P, AUTHKEY_32P, FEED_32P, FEEDINFO_32P, \
MONITOR_STATUS, SEARCH_STATUS, RSS_STATUS, WEEKLY_STATUS, VERSION_STATUS, UPDATER_STATUS, DBUPDATE_INTERVAL, LOG_LANG, LOG_CHARSET, APILOCK, SEARCHLOCK, DDL_LOCK, LOG_LEVEL, \
@@ -375,40 +375,21 @@ def start():
ss = searchit.CurrentSearcher()
SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=None, trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
#thread queue control..
queue_schedule('search_queue', 'start')
if all([CONFIG.ENABLE_TORRENTS, CONFIG.AUTO_SNATCH, OS_DETECT != 'Windows']) and any([CONFIG.TORRENT_DOWNLOADER == 2, CONFIG.TORRENT_DOWNLOADER == 4]):
logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
SNPOOL.start()
logger.info('[AUTO-SNATCHER] Successfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')
queue_schedule('snatched_queue', 'start')
if CONFIG.POST_PROCESSING is True and ( all([CONFIG.NZB_DOWNLOADER == 0, CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([CONFIG.NZB_DOWNLOADER == 1, CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
if CONFIG.NZB_DOWNLOADER == 0:
logger.info('[SAB-MONITOR] Completed post-processing handling enabled for SABnzbd. Attempting to background load....')
elif CONFIG.NZB_DOWNLOADER == 1:
logger.info('[NZBGET-MONITOR] Completed post-processing handling enabled for NZBGet. Attempting to background load....')
NZBPOOL = threading.Thread(target=helpers.nzb_monitor, args=(NZB_QUEUE,), name="AUTO-COMPLETE-NZB")
NZBPOOL.start()
if CONFIG.NZB_DOWNLOADER == 0:
logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for SABnzbd - will now monitor for completed nzbs within sabnzbd and post-process automatically....')
elif CONFIG.NZB_DOWNLOADER == 1:
logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically....')
queue_schedule('nzb_queue', 'start')
logger.info('[SEARCH-QUEUE] Attempting to background load the search queue....')
SEARCHPOOL = threading.Thread(target=helpers.search_queue, args=(SEARCH_QUEUE,), name="SEARCH-QUEUE")
SEARCHPOOL.start()
if CONFIG.POST_PROCESSING is True:
logger.info('[POST-PROCESS-QUEUE] Post Process queue enabled & monitoring for api requests....')
PPPOOL = threading.Thread(target=helpers.postprocess_main, args=(PP_QUEUE,), name="POST-PROCESS-QUEUE")
PPPOOL.start()
logger.info('[POST-PROCESS-QUEUE] Successfully started Post-Processing Queuer....')
queue_schedule('pp_queue', 'start')
if CONFIG.ENABLE_DDL is True:
logger.info('[DDL-QUEUE] DDL Download queue enabled & monitoring for requests....')
DDLPOOL = threading.Thread(target=helpers.ddl_downloader, args=(DDL_QUEUE,), name="DDL-QUEUE")
DDLPOOL.start()
logger.info('[DDL-QUEUE] Successfully started DDL Download Queuer....')
queue_schedule('ddl_queue', 'start')
helpers.latestdate_fix()
if CONFIG.ALT_PULL == 2:
@@ -495,6 +476,183 @@ def start():
started = True
def queue_schedule(queuetype, mode):

    #global _INITIALIZED

    if mode == 'start':
        if queuetype == 'snatched_queue':
            try:
                if mylar.SNPOOL.isAlive() is True:
                    return
            except Exception as e:
                pass

            logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
            mylar.SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
            mylar.SNPOOL.start()
            logger.info('[AUTO-SNATCHER] Successfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')
        elif queuetype == 'nzb_queue':
            try:
                if mylar.NZBPOOL.isAlive() is True:
                    return
            except Exception as e:
                pass

            if CONFIG.NZB_DOWNLOADER == 0:
                logger.info('[SAB-MONITOR] Completed post-processing handling enabled for SABnzbd. Attempting to background load....')
            elif CONFIG.NZB_DOWNLOADER == 1:
                logger.info('[NZBGET-MONITOR] Completed post-processing handling enabled for NZBGet. Attempting to background load....')

            mylar.NZBPOOL = threading.Thread(target=helpers.nzb_monitor, args=(NZB_QUEUE,), name="AUTO-COMPLETE-NZB")
            mylar.NZBPOOL.start()

            if CONFIG.NZB_DOWNLOADER == 0:
                logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for SABnzbd - will now monitor for completed nzbs within sabnzbd and post-process automatically...')
            elif CONFIG.NZB_DOWNLOADER == 1:
                logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically...')
        elif queuetype == 'search_queue':
            try:
                if mylar.SEARCHPOOL.isAlive() is True:
                    return
            except Exception as e:
                pass

            logger.info('[SEARCH-QUEUE] Attempting to background load the search queue....')
            mylar.SEARCHPOOL = threading.Thread(target=helpers.search_queue, args=(SEARCH_QUEUE,), name="SEARCH-QUEUE")
            mylar.SEARCHPOOL.start()
            logger.info('[SEARCH-QUEUE] Successfully started the Search Queuer...')
        elif queuetype == 'pp_queue':
            try:
                if mylar.PPPOOL.isAlive() is True:
                    return
            except Exception as e:
                pass

            logger.info('[POST-PROCESS-QUEUE] Post Process queue enabled & monitoring for api requests....')
            mylar.PPPOOL = threading.Thread(target=helpers.postprocess_main, args=(PP_QUEUE,), name="POST-PROCESS-QUEUE")
            mylar.PPPOOL.start()
            logger.info('[POST-PROCESS-QUEUE] Successfully started Post-Processing Queuer....')
        elif queuetype == 'ddl_queue':
            try:
                if mylar.DDLPOOL.isAlive() is True:
                    return
            except Exception as e:
                pass

            logger.info('[DDL-QUEUE] DDL Download queue enabled & monitoring for requests....')
            mylar.DDLPOOL = threading.Thread(target=helpers.ddl_downloader, args=(DDL_QUEUE,), name="DDL-QUEUE")
            mylar.DDLPOOL.start()
            logger.info('[DDL-QUEUE] Successfully started DDL Download Queuer....')
    else:
        if (queuetype == 'nzb_queue') or mode == 'shutdown':
            try:
                if mylar.NZBPOOL.isAlive() is False:
                    return
                elif all([mode != 'shutdown', mylar.CONFIG.POST_PROCESSING is True]) and ( all([mylar.CONFIG.NZB_DOWNLOADER == 0, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([mylar.CONFIG.NZB_DOWNLOADER == 1, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
                    return
            except Exception as e:
                return

            logger.fdebug('Terminating the NZB auto-complete queue thread')
            try:
                mylar.NZB_QUEUE.put('exit')
                mylar.NZBPOOL.join(5)
                logger.fdebug('Joined pool for termination - successful')
            except KeyboardInterrupt:
                mylar.NZB_QUEUE.put('exit')
                mylar.NZBPOOL.join(5)
            except AssertionError:
                if mode == 'shutdown':
                    os._exit(0)

        if (queuetype == 'snatched_queue') or mode == 'shutdown':
            try:
                if mylar.SNPOOL.isAlive() is False:
                    return
                elif all([mode != 'shutdown', mylar.CONFIG.ENABLE_TORRENTS is True, mylar.CONFIG.AUTO_SNATCH is True, OS_DETECT != 'Windows']) and any([mylar.CONFIG.TORRENT_DOWNLOADER == 2, mylar.CONFIG.TORRENT_DOWNLOADER == 4]):
                    return
            except Exception as e:
                return

            logger.fdebug('Terminating the auto-snatch thread.')
            try:
                mylar.SNATCHED_QUEUE.put('exit')
                mylar.SNPOOL.join(5)
                logger.fdebug('Joined pool for termination - successful')
            except KeyboardInterrupt:
                mylar.SNATCHED_QUEUE.put('exit')
                mylar.SNPOOL.join(5)
            except AssertionError:
                if mode == 'shutdown':
                    os._exit(0)

        if (queuetype == 'search_queue') or mode == 'shutdown':
            try:
                if mylar.SEARCHPOOL.isAlive() is False:
                    return
            except Exception as e:
                return

            logger.fdebug('Terminating the search queue thread.')
            try:
                mylar.SEARCH_QUEUE.put('exit')
                mylar.SEARCHPOOL.join(5)
                logger.fdebug('Joined pool for termination - successful')
            except KeyboardInterrupt:
                mylar.SEARCH_QUEUE.put('exit')
                mylar.SEARCHPOOL.join(5)
            except AssertionError:
                if mode == 'shutdown':
                    os._exit(0)

        if (queuetype == 'pp_queue') or mode == 'shutdown':
            try:
                if mylar.PPPOOL.isAlive() is False:
                    return
                elif all([mylar.CONFIG.POST_PROCESSING is True, mode != 'shutdown']):
                    return
            except Exception as e:
                return

            logger.fdebug('Terminating the post-processing queue thread.')
            try:
                mylar.PP_QUEUE.put('exit')
                mylar.PPPOOL.join(5)
                logger.fdebug('Joined pool for termination - successful')
            except KeyboardInterrupt:
                mylar.PP_QUEUE.put('exit')
                mylar.PPPOOL.join(5)
            except AssertionError:
                if mode == 'shutdown':
                    os._exit(0)

        if (queuetype == 'ddl_queue') or mode == 'shutdown':
            try:
                if mylar.DDLPOOL.isAlive() is False:
                    return
                elif all([mylar.CONFIG.ENABLE_DDL is True, mode != 'shutdown']):
                    return
            except Exception as e:
                return

            logger.fdebug('Terminating the DDL download queue thread')
            try:
                mylar.DDL_QUEUE.put('exit')
                mylar.DDLPOOL.join(5)
                logger.fdebug('Joined pool for termination - successful')
            except KeyboardInterrupt:
                mylar.DDL_QUEUE.put('exit')
                mylar.DDLPOOL.join(5)
            except AssertionError:
                if mode == 'shutdown':
                    os._exit(0)
def dbcheck():
conn = sqlite3.connect(DB_FILE)
c_error = 'sqlite3.OperationalError'
@@ -528,7 +686,7 @@ def dbcheck():
c.execute('CREATE TABLE IF NOT EXISTS jobhistory (JobName TEXT, prev_run_datetime timestamp, prev_run_timestamp REAL, next_run_datetime timestamp, next_run_timestamp REAL, last_run_completed TEXT, successful_completions TEXT, failed_completions TEXT, status TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS manualresults (provider TEXT, id TEXT, kind TEXT, comicname TEXT, volume TEXT, oneoff TEXT, fullprov TEXT, issuenumber TEXT, modcomicname TEXT, name TEXT, link TEXT, size TEXT, pack_numbers TEXT, pack_issuelist TEXT, comicyear TEXT, issuedate TEXT, tmpprov TEXT, pack TEXT, issueid TEXT, comicid TEXT, sarc TEXT, issuearcid TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS storyarcs(StoryArcID TEXT, ComicName TEXT, IssueNumber TEXT, SeriesYear TEXT, IssueYEAR TEXT, StoryArc TEXT, TotalIssues TEXT, Status TEXT, inCacheDir TEXT, Location TEXT, IssueArcID TEXT, ReadingOrder INT, IssueID TEXT, ComicID TEXT, ReleaseDate TEXT, IssueDate TEXT, Publisher TEXT, IssuePublisher TEXT, IssueName TEXT, CV_ArcID TEXT, Int_IssueNumber INT, DynamicComicName TEXT, Volume TEXT, Manual TEXT, DateAdded TEXT, DigitalDate TEXT, Type TEXT, Aliases TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS ddl_info (ID TEXT UNIQUE, series TEXT, year TEXT, filename TEXT, size TEXT, issueid TEXT, comicid TEXT, link TEXT, status TEXT, remote_filesize TEXT, updated_date TEXT, mainlink TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS ddl_info (ID TEXT UNIQUE, series TEXT, year TEXT, filename TEXT, size TEXT, issueid TEXT, comicid TEXT, link TEXT, status TEXT, remote_filesize TEXT, updated_date TEXT, mainlink TEXT, issues TEXT)')
conn.commit()
c.close()
@@ -1115,6 +1273,11 @@ def dbcheck():
except sqlite3.OperationalError:
c.execute('ALTER TABLE ddl_info ADD COLUMN mainlink TEXT')
try:
c.execute('SELECT issues from ddl_info')
except sqlite3.OperationalError:
c.execute('ALTER TABLE ddl_info ADD COLUMN issues TEXT')
#if it's prior to Wednesday, the issue counts will be inflated by one as the online db's everywhere
#prepare for the next 'new' release of a series. It's caught in updater.py, so let's just store the
#value in the sql so we can display it in the details screen for everyone to wonder at.
@@ -1230,61 +1393,20 @@ def halt():
logger.info('Shutting down the background schedulers...')
SCHED.shutdown(wait=False)
if NZBPOOL is not None:
logger.info('Terminating the nzb auto-complete thread.')
try:
NZBPOOL.join(10)
logger.info('Joined pool for termination - successful')
except KeyboardInterrupt:
NZB_QUEUE.put('exit')
NZBPOOL.join(5)
except AssertionError:
os._exit(0)
queue_schedule('all', 'shutdown')
#if NZBPOOL is not None:
# queue_schedule('nzb_queue', 'shutdown')
#if SNPOOL is not None:
# queue_schedule('snatched_queue', 'shutdown')
if SNPOOL is not None:
logger.info('Terminating the auto-snatch thread.')
try:
SNPOOL.join(10)
logger.info('Joined pool for termination - successful')
except KeyboardInterrupt:
SNATCHED_QUEUE.put('exit')
SNPOOL.join(5)
except AssertionError:
os._exit(0)
#if SEARCHPOOL is not None:
# queue_schedule('search_queue', 'shutdown')
#if PPPOOL is not None:
# queue_schedule('pp_queue', 'shutdown')
if SEARCHPOOL is not None:
logger.info('Terminating the search queue thread.')
try:
SEARCHPOOL.join(10)
logger.info('Joined pool for termination - successful')
except KeyboardInterrupt:
SEARCH_QUEUE.put('exit')
SEARCHPOOL.join(5)
except AssertionError:
os._exit(0)
if PPPOOL is not None:
logger.info('Terminating the post-processing queue thread.')
try:
PPPOOL.join(10)
logger.info('Joined pool for termination - successful')
except KeyboardInterrupt:
PP_QUEUE.put('exit')
PPPOOL.join(5)
except AssertionError:
os._exit(0)
if DDLPOOL is not None:
logger.info('Terminating the DDL download queue thread.')
try:
DDLPOOL.join(10)
logger.info('Joined pool for termination - successful')
except KeyboardInterrupt:
DDL_QUEUE.put('exit')
DDLPOOL.join(5)
except AssertionError:
os._exit(0)
#if DDLPOOL is not None:
# queue_schedule('ddl_queue', 'shutdown')
_INITIALIZED = False

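The five near-identical termination blocks that halt() carried are now the stop/shutdown branch of queue_schedule() above, and the ordering there matters: a worker blocked in queue.get(True) never observes a join() timeout on its own, so the 'exit' sentinel is enqueued first to wake it, and only then is the thread joined with a five-second bound. A hedged sketch of that stop sequence (stop_pool is an illustrative helper, not a Mylar function):

def stop_pool(q, pool, timeout=5):
    # Mirrors the stop branch of queue_schedule(): nothing to do if no live worker...
    if pool is None or not pool.is_alive():
        return
    q.put('exit')       # ...otherwise wake the blocking q.get(True) with the sentinel
    pool.join(timeout)  # bounded join; the real shutdown path calls os._exit(0) if the join asserts
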
mylar/config.py

@@ -535,7 +535,7 @@ class Config(object):
print('Logging level over-ridden by startup value. Changing from %s to %s' % (self.LOG_LEVEL, mylar.LOG_LEVEL))
logger.mylar_log.initLogger(loglevel=mylar.LOG_LEVEL, log_dir=self.LOG_DIR, max_logsize=self.MAX_LOGSIZE, max_logfiles=self.MAX_LOGFILES)
self.configure()
self.configure(startup=startup)
return self
def config_update(self):
@@ -741,7 +741,7 @@ class Config(object):
except IOError as e:
logger.warn("Error writing configuration file: %s", e)
def configure(self, update=False):
def configure(self, update=False, startup=False):
#force alt_pull = 2 on restarts regardless of settings
if self.ALT_PULL != 2:
@@ -903,22 +903,16 @@ class Config(object):
logger.fdebug('Successfully created ComicTagger Settings location.')
#make sure queues are running here...
if all([mylar.NZBPOOL is None, self.POST_PROCESSING is True]) and ( all([self.NZB_DOWNLOADER == 0, self.SAB_CLIENT_POST_PROCESSING is True]) or all([self.NZB_DOWNLOADER == 1, self.NZBGET_CLIENT_POST_PROCESSING is True]) ):
if self.NZB_DOWNLOADER == 0:
logger.info('[SAB-MONITOR] Completed post-processing handling enabled for SABnzbd. Attempting to background load....')
elif self.NZB_DOWNLOADER == 1:
logger.info('[NZBGET-MONITOR] Completed post-processing handling enabled for NZBGet. Attempting to background load....')
mylar.NZBPOOL = threading.Thread(target=helpers.nzb_monitor, args=(mylar.NZB_QUEUE,), name="AUTO-COMPLETE-NZB")
mylar.NZBPOOL.start()
if self.NZB_DOWNLOADER == 0:
logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for SABnzbd - will now monitor for completed nzbs within sabnzbd and post-process automatically...')
elif self.NZB_DOWNLOADER == 1:
logger.info('[AUTO-COMPLETE-NZB] Successfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically...')
if startup is False:
if self.POST_PROCESSING is True and ( all([self.NZB_DOWNLOADER == 0, self.SAB_CLIENT_POST_PROCESSING is True]) or all([self.NZB_DOWNLOADER == 1, self.NZBGET_CLIENT_POST_PROCESSING is True]) ):
mylar.queue_schedule('nzb_queue', 'start')
elif self.POST_PROCESSING is True and ( all([self.NZB_DOWNLOADER == 0, self.SAB_CLIENT_POST_PROCESSING is False]) or all([self.NZB_DOWNLOADER == 1, self.NZBGET_CLIENT_POST_PROCESSING is False]) ):
mylar.queue_schedule('nzb_queue', 'stop')
if all([mylar.DDLPOOL is None, self.ENABLE_DDL is True]):
mylar.DDLPOOL = threading.Thread(target=helpers.ddl_downloader, args=(mylar.DDL_QUEUE,), name='DDL-QUEUE')
mylar.DDLPOOL.start()
logger.info('[DDL-QUEUE] Successfully started DDL Download Queuer....')
if self.ENABLE_DDL is True:
mylar.queue_schedule('ddl_queue', 'start')
elif self.ENABLE_DDL is False:
mylar.queue_schedule('ddl_queue', 'stop')
if not self.DDL_LOCATION:
self.DDL_LOCATION = self.CACHE_DIR

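configure() gains a startup flag for the same reason: at boot, start() owns queue scheduling, while a runtime settings save (configure(update=True, startup=False), as the webinterface calls it below) reconciles the running threads against the new values through queue_schedule(), whose isAlive() guard makes repeated 'start' calls idempotent. Condensed to its shape, the per-queue logic above amounts to something like this (reconcile_queue is an illustrative helper, not Mylar code; for nzb_queue the enabled flag is really the compound SABnzbd/NZBGet post-processing condition shown above):

import mylar

def reconcile_queue(queuename, enabled, startup=False):
    if startup:
        return  # boot path: start() will schedule every enabled queue exactly once
    if enabled:
        mylar.queue_schedule(queuename, 'start')  # no-op if the worker is already alive
    else:
        mylar.queue_schedule(queuename, 'stop')
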
mylar/helpers.py

@@ -3038,10 +3038,10 @@ def ddl_downloader(queue):
elif mylar.DDL_LOCK is False and queue.qsize() >= 1:
item = queue.get(True)
logger.info('Now loading request from DDL queue: %s' % item['series'])
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
logger.info('Now loading request from DDL queue: %s' % item['series'])
#write this to the table so we have a record of what's going on.
ctrlval = {'id': item['id']}
@@ -3124,11 +3124,11 @@ def search_queue(queue):
elif mylar.SEARCHLOCK is False and queue.qsize() >= 1: #len(queue) > 1:
item = queue.get(True)
logger.info('[SEARCH-QUEUE] Now loading item from search queue: %s' % item)
if item == 'exit':
logger.info('[SEARCH-QUEUE] Cleaning up workers for shutdown')
break
logger.info('[SEARCH-QUEUE] Now loading item from search queue: %s' % item)
if mylar.SEARCHLOCK is False:
ss_queue = mylar.search.searchforissue(item['issueid'])
time.sleep(5) #arbitrary sleep to let the process attempt to finish pp'ing
@@ -3142,63 +3142,66 @@ def search_queue(queue):
def worker_main(queue):
while True:
item = queue.get(True)
logger.info('Now loading from queue: ' + item)
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
snstat = torrentinfo(torrent_hash=item, download=True)
if snstat['snatch_status'] == 'IN PROGRESS':
logger.info('Still downloading in client....let us try again momentarily.')
time.sleep(30)
mylar.SNATCHED_QUEUE.put(item)
elif any([snstat['snatch_status'] == 'MONITOR FAIL', snstat['snatch_status'] == 'MONITOR COMPLETE']):
logger.info('File copied for post-processing - submitting as a direct pp.')
threading.Thread(target=self.checkFolder, args=[os.path.abspath(os.path.join(snstat['copied_filepath'], os.pardir))]).start()
time.sleep(15)
if queue.qsize() >= 1:
item = queue.get(True)
logger.info('Now loading from queue: ' + item)
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
snstat = torrentinfo(torrent_hash=item, download=True)
if snstat['snatch_status'] == 'IN PROGRESS':
logger.info('Still downloading in client....let us try again momentarily.')
time.sleep(30)
mylar.SNATCHED_QUEUE.put(item)
elif any([snstat['snatch_status'] == 'MONITOR FAIL', snstat['snatch_status'] == 'MONITOR COMPLETE']):
logger.info('File copied for post-processing - submitting as a direct pp.')
threading.Thread(target=self.checkFolder, args=[os.path.abspath(os.path.join(snstat['copied_filepath'], os.pardir))]).start()
else:
time.sleep(15)
def nzb_monitor(queue):
while True:
item = queue.get(True)
logger.info('Now loading from queue: %s' % item)
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
if all([mylar.USE_SABNZBD is True, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]):
nz = sabnzbd.SABnzbd(item)
nzstat = nz.processor()
elif all([mylar.USE_NZBGET is True, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]):
nz = nzbget.NZBGet()
nzstat = nz.processor(item)
else:
logger.warn('There are no NZB Completed Download handlers enabled. Not sending item to completed download handling...')
break
if any([nzstat['status'] == 'file not found', nzstat['status'] == 'double-pp']):
logger.warn('Unable to complete post-processing call due to not finding file in the location provided. [%s]' % item)
elif nzstat['status'] is False:
logger.info('Could not find NZBID %s in the downloader\'s queue. I will requeue this item for post-processing...' % item['NZBID'])
time.sleep(5)
mylar.NZB_QUEUE.put(item)
elif nzstat['status'] is True:
if nzstat['failed'] is False:
logger.info('File successfully downloaded - now initiating completed downloading handling.')
if queue.qsize() >= 1:
item = queue.get(True)
if item == 'exit':
logger.info('Cleaning up workers for shutdown')
break
logger.info('Now loading from queue: %s' % item)
if all([mylar.USE_SABNZBD is True, mylar.CONFIG.SAB_CLIENT_POST_PROCESSING is True]):
nz = sabnzbd.SABnzbd(item)
nzstat = nz.processor()
elif all([mylar.USE_NZBGET is True, mylar.CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]):
nz = nzbget.NZBGet()
nzstat = nz.processor(item)
else:
logger.info('File failed either due to being corrupt or incomplete - now initiating completed failed downloading handling.')
try:
mylar.PP_QUEUE.put({'nzb_name': nzstat['name'],
'nzb_folder': nzstat['location'],
'failed': nzstat['failed'],
'issueid': nzstat['issueid'],
'comicid': nzstat['comicid'],
'apicall': nzstat['apicall'],
'ddl': False})
#cc = process.Process(nzstat['name'], nzstat['location'], failed=nzstat['failed'])
#nzpp = cc.post_process()
except Exception as e:
logger.info('process error: %s' % e)
logger.warn('There are no NZB Completed Download handlers enabled. Not sending item to completed download handling...')
break
time.sleep(5)
if any([nzstat['status'] == 'file not found', nzstat['status'] == 'double-pp']):
logger.warn('Unable to complete post-processing call due to not finding file in the location provided. [%s]' % item)
elif nzstat['status'] is False:
logger.info('Could not find NZBID %s in the downloader\'s queue. I will requeue this item for post-processing...' % item['NZBID'])
time.sleep(5)
mylar.NZB_QUEUE.put(item)
elif nzstat['status'] is True:
if nzstat['failed'] is False:
logger.info('File successfully downloaded - now initiating completed downloading handling.')
else:
logger.info('File failed either due to being corrupt or incomplete - now initiating completed failed downloading handling.')
try:
mylar.PP_QUEUE.put({'nzb_name': nzstat['name'],
'nzb_folder': nzstat['location'],
'failed': nzstat['failed'],
'issueid': nzstat['issueid'],
'comicid': nzstat['comicid'],
'apicall': nzstat['apicall'],
'ddl': False})
#cc = process.Process(nzstat['name'], nzstat['location'], failed=nzstat['failed'])
#nzpp = cc.post_process()
except Exception as e:
logger.info('process error: %s' % e)
else:
time.sleep(5)
def script_env(mode, vars):
#mode = on-snatch, pre-postprocess, post-postprocess

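The helpers.py loops also had a subtler ordering bug that these hunks fix: the sentinel test ran after the item was already used. ddl_downloader, for instance, logged item['series'] before checking item == 'exit', and indexing the bare string sentinel that way raises a TypeError instead of shutting the thread down. The rewritten loops test the sentinel first and poll with qsize() so an idle worker sleeps rather than blocking in get() forever. A minimal sketch of the corrected shape (the print handlers are illustrative):

import time
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2, as in this codebase

def worker_loop(q):
    while True:
        if q.qsize() >= 1:
            item = q.get(True)
            if item == 'exit':  # sentinel first: item may be a bare string here
                print('Cleaning up workers for shutdown')
                break
            print('Now loading request: %s' % item['series'])  # safe: item is a dict by now
        else:
            time.sleep(5)       # idle poll instead of a forever-blocking get()
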
mylar/webinterface.py

@@ -4068,7 +4068,7 @@ class WebInterface(object):
mylar.CONFIG.IMP_METADATA = bool(imp_metadata)
mylar.CONFIG.IMP_PATHS = bool(imp_paths)
mylar.CONFIG.configure(update=True)
mylar.CONFIG.configure(update=True, startup=False)
# Write the config
logger.info('Now updating config...')
mylar.CONFIG.writeconfig()