bazarr/libs/apscheduler/executors/tornado.py

from __future__ import absolute_import
import sys

from concurrent.futures import ThreadPoolExecutor
from tornado.gen import convert_yielded

from apscheduler.executors.base import BaseExecutor, run_job

try:
    from apscheduler.executors.base_py3 import run_coroutine_job
    from apscheduler.util import iscoroutinefunction_partial
except ImportError:
    # Coroutine support is unavailable (e.g. Python 2); treat every job as a
    # regular function so it is always submitted to the thread pool.
    def iscoroutinefunction_partial(func):
        return False


class TornadoExecutor(BaseExecutor):
    """
    Runs jobs either in a thread pool or directly on the I/O loop.

    If the job function is a native coroutine function, it is scheduled to be run directly in the
    I/O loop as soon as possible. All other functions are run in a thread pool.

    Plugin alias: ``tornado``

    :param int max_workers: maximum number of worker threads in the thread pool
    """

    def __init__(self, max_workers=10):
        super(TornadoExecutor, self).__init__()
        self.executor = ThreadPoolExecutor(max_workers)

    def start(self, scheduler, alias):
        super(TornadoExecutor, self).start(scheduler, alias)
        self._ioloop = scheduler._ioloop

    def _do_submit_job(self, job, run_times):
        def callback(f):
            try:
                events = f.result()
            except BaseException:
                self._run_job_error(job.id, *sys.exc_info()[1:])
            else:
                self._run_job_success(job.id, events)

        # Native coroutine jobs are scheduled directly on the I/O loop; all
        # other callables are submitted to the thread pool.
        if iscoroutinefunction_partial(job.func):
            f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
        else:
            f = self.executor.submit(run_job, job, job._jobstore_alias, run_times,
                                     self._logger.name)

        # Convert the result (concurrent future or coroutine) into a Tornado
        # future and report success or failure back to the scheduler.
        f = convert_yielded(f)
        f.add_done_callback(callback)
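
A minimal usage sketch, not part of this file: it shows how this executor is typically registered with APScheduler's TornadoScheduler via its plugin alias ``tornado`` and the ``max_workers`` option defined above. The job functions ``tick`` and ``async_tick`` and the 5-second interval are illustrative assumptions, not code from bazarr.

from datetime import datetime

from apscheduler.schedulers.tornado import TornadoScheduler
from tornado.ioloop import IOLoop


def tick():
    # Plain function: runs in the executor's thread pool.
    print('Tick at', datetime.now())


async def async_tick():
    # Native coroutine: scheduled directly on the I/O loop.
    print('Async tick at', datetime.now())


scheduler = TornadoScheduler()
scheduler.add_executor('tornado', alias='default', max_workers=10)
scheduler.add_job(tick, 'interval', seconds=5)
scheduler.add_job(async_tick, 'interval', seconds=5)
scheduler.start()
IOLoop.current().start()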