diff --git a/Mylar.py b/Mylar.py index ab6e7f15..73e9bc8e 100644 --- a/Mylar.py +++ b/Mylar.py @@ -63,7 +63,7 @@ def main(): mylar.SYS_ENCODING = 'UTF-8' # Set up and gather command line arguments - parser = argparse.ArgumentParser(description='Comic Book add-on for SABnzbd+') + parser = argparse.ArgumentParser(description='Automated Comic Book Downloader') parser.add_argument('-v', '--verbose', action='store_true', help='Increase console logging verbosity') parser.add_argument('-q', '--quiet', action='store_true', help='Turn off console logging') diff --git a/data/interfaces/default/comicdetails.html b/data/interfaces/default/comicdetails.html old mode 100644 new mode 100755 index 4114edbf..ebf9581d --- a/data/interfaces/default/comicdetails.html +++ b/data/interfaces/default/comicdetails.html @@ -24,7 +24,7 @@ %if mylar.RENAME_FILES: Rename Files %endif - Recheck Files + Recheck Files %if mylar.ENABLE_META: Manual MetaTagging %endif @@ -299,7 +299,7 @@
Mark selected issues as - @@ -414,7 +414,7 @@ %else: - + %endif Download Manually check for issues
@@ -90,16 +93,14 @@ %if pullfilter is True: ${weekly['PUBLISHER']} - %if weekly['HAVEIT'] == 'No': - %if weekly['COMICID'] != '' and weekly['COMICID'] is not None: + %if any([weekly['HAVEIT'] == 'No', weekly['HAVEIT'] == 'OneOff']): + %if any([weekly['COMICID'] != '', weekly['COMICID'] is not None]): ${weekly['COMIC']} %else: ${weekly['COMIC']} %endif - %elif weekly['HAVEIT'] == 'OneOff': - ${weekly['COMIC']} %else: - ${weekly['COMIC']} + ${weekly['COMIC']} %endif ${weekly['ISSUE']} diff --git a/lib/apscheduler/__init__.py b/lib/apscheduler/__init__.py index 11e93a1d..89965478 100644 --- a/lib/apscheduler/__init__.py +++ b/lib/apscheduler/__init__.py @@ -1,3 +1,8 @@ -version_info = (2, 0, 0) -version = '.'.join(str(n) for n in version_info[:3]) -release = version + ''.join(str(n) for n in version_info[3:]) +# These will be removed in APScheduler 4.0. +#release = __import__('pkg_resources').get_distribution('APScheduler').version.split('-')[0] +#version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.')) +#version = __version__ = '.'.join(str(x) for x in version_info[:3]) + +version_info = (3, 3, 1) +release = '3.3.1' +version = __version__ = '3.3.1' diff --git a/lib/apscheduler/events.py b/lib/apscheduler/events.py index 80bde8e6..890763eb 100644 --- a/lib/apscheduler/events.py +++ b/lib/apscheduler/events.py @@ -1,63 +1,93 @@ -__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN', - 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', - 'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED', - 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', - 'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent') +__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED', + 'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED', + 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED', + 'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED', + 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES', + 'SchedulerEvent', 'JobEvent', 'JobExecutionEvent') -EVENT_SCHEDULER_START = 1 # The scheduler was started -EVENT_SCHEDULER_SHUTDOWN = 2 # The scheduler was shut down -EVENT_JOBSTORE_ADDED = 4 # A job store was added to the scheduler -EVENT_JOBSTORE_REMOVED = 8 # A job store was removed from the scheduler -EVENT_JOBSTORE_JOB_ADDED = 16 # A job was added to a job store -EVENT_JOBSTORE_JOB_REMOVED = 32 # A job was removed from a job store -EVENT_JOB_EXECUTED = 64 # A job was executed successfully -EVENT_JOB_ERROR = 128 # A job raised an exception during execution -EVENT_JOB_MISSED = 256 # A job's execution was missed -EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN | - EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | - EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED | - EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED) +EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0 +EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 +EVENT_SCHEDULER_PAUSED = 2 ** 2 +EVENT_SCHEDULER_RESUMED = 2 ** 3 +EVENT_EXECUTOR_ADDED = 2 ** 4 +EVENT_EXECUTOR_REMOVED = 2 ** 5 +EVENT_JOBSTORE_ADDED = 2 ** 6 +EVENT_JOBSTORE_REMOVED = 2 ** 7 +EVENT_ALL_JOBS_REMOVED = 2 ** 8 +EVENT_JOB_ADDED = 2 ** 9 +EVENT_JOB_REMOVED = 2 ** 10 +EVENT_JOB_MODIFIED = 2 ** 11 +EVENT_JOB_EXECUTED = 2 ** 12 +EVENT_JOB_ERROR = 2 ** 13 +EVENT_JOB_MISSED = 2 ** 14 +EVENT_JOB_SUBMITTED = 2 ** 15 +EVENT_JOB_MAX_INSTANCES = 2 ** 16 +EVENT_ALL = (EVENT_SCHEDULER_STARTED | 
EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | + EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | + EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | + EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | + EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES) class SchedulerEvent(object): """ An event that concerns the scheduler itself. - :var code: the type code of this event + :ivar code: the type code of this event + :ivar alias: alias of the job store or executor that was added or removed (if applicable) """ - def __init__(self, code): + + def __init__(self, code, alias=None): + super(SchedulerEvent, self).__init__() self.code = code - - -class JobStoreEvent(SchedulerEvent): - """ - An event that concerns job stores. - - :var alias: the alias of the job store involved - :var job: the new job if a job was added - """ - def __init__(self, code, alias, job=None): - SchedulerEvent.__init__(self, code) self.alias = alias - if job: - self.job = job + + def __repr__(self): + return '<%s (code=%d)>' % (self.__class__.__name__, self.code) class JobEvent(SchedulerEvent): """ - An event that concerns the execution of individual jobs. + An event that concerns a job. - :var job: the job instance in question - :var scheduled_run_time: the time when the job was scheduled to be run - :var retval: the return value of the successfully executed job - :var exception: the exception raised by the job - :var traceback: the traceback object associated with the exception + :ivar code: the type code of this event + :ivar job_id: identifier of the job in question + :ivar jobstore: alias of the job store containing the job in question """ - def __init__(self, code, job, scheduled_run_time, retval=None, - exception=None, traceback=None): - SchedulerEvent.__init__(self, code) - self.job = job + + def __init__(self, code, job_id, jobstore): + super(JobEvent, self).__init__(code) + self.code = code + self.job_id = job_id + self.jobstore = jobstore + + +class JobSubmissionEvent(JobEvent): + """ + An event that concerns the submission of a job to its executor. + + :ivar scheduled_run_times: a list of datetimes when the job was intended to run + """ + + def __init__(self, code, job_id, jobstore, scheduled_run_times): + super(JobSubmissionEvent, self).__init__(code, job_id, jobstore) + self.scheduled_run_times = scheduled_run_times + + +class JobExecutionEvent(JobEvent): + """ + An event that concerns the running of a job within its executor. 
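Since every event code above is a distinct power of two, a listener mask is just a bitwise OR of the codes of interest. A minimal sketch against this events API; BackgroundScheduler and add_listener() live in apscheduler.schedulers, outside this hunk:

from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
from apscheduler.schedulers.background import BackgroundScheduler

def on_job_finished(event):
    # event is a JobExecutionEvent as defined in this file
    if event.exception:
        print('job %s raised %r' % (event.job_id, event.exception))
    else:
        print('job %s returned %r' % (event.job_id, event.retval))

scheduler = BackgroundScheduler()
# The mask argument is a bitwise OR of the power-of-two codes above
scheduler.add_listener(on_job_finished, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)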
+ + :ivar scheduled_run_time: the time when the job was scheduled to be run + :ivar retval: the return value of the successfully executed job + :ivar exception: the exception raised by the job + :ivar traceback: a formatted traceback for the exception + """ + + def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None, + traceback=None): + super(JobExecutionEvent, self).__init__(code, job_id, jobstore) self.scheduled_run_time = scheduled_run_time self.retval = retval self.exception = exception diff --git a/lib/apscheduler/executors/__init__.py b/lib/apscheduler/executors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/apscheduler/executors/asyncio.py b/lib/apscheduler/executors/asyncio.py new file mode 100644 index 00000000..44794295 --- /dev/null +++ b/lib/apscheduler/executors/asyncio.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + +try: + from asyncio import iscoroutinefunction + from apscheduler.executors.base_py3 import run_coroutine_job +except ImportError: + from trollius import iscoroutinefunction + run_coroutine_job = None + + +class AsyncIOExecutor(BaseExecutor): + """ + Runs jobs in the default executor of the event loop. + + If the job function is a native coroutine function, it is scheduled to be run directly in the + event loop as soon as possible. All other functions are run in the event loop's default + executor which is usually a thread pool. + + Plugin alias: ``asyncio`` + """ + + def start(self, scheduler, alias): + super(AsyncIOExecutor, self).start(scheduler, alias) + self._eventloop = scheduler._eventloop + + def _do_submit_job(self, job, run_times): + def callback(f): + try: + events = f.result() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + if iscoroutinefunction(job.func): + if run_coroutine_job is not None: + coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name) + f = self._eventloop.create_task(coro) + else: + raise Exception('Executing coroutine based jobs is not supported with Trollius') + else: + f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times, + self._logger.name) + + f.add_done_callback(callback) diff --git a/lib/apscheduler/executors/base.py b/lib/apscheduler/executors/base.py new file mode 100644 index 00000000..b36a86fc --- /dev/null +++ b/lib/apscheduler/executors/base.py @@ -0,0 +1,137 @@ +from abc import ABCMeta, abstractmethod +from collections import defaultdict +from datetime import datetime, timedelta +from traceback import format_tb +import logging +import sys + +from pytz import utc +import six + +from apscheduler.events import ( + JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED) + + +class MaxInstancesReachedError(Exception): + def __init__(self, job): + super(MaxInstancesReachedError, self).__init__( + 'Job "%s" has already reached its maximum number of instances (%d)' % + (job.id, job.max_instances)) + + +class BaseExecutor(six.with_metaclass(ABCMeta, object)): + """Abstract base class that defines the interface that every executor must implement.""" + + _scheduler = None + _lock = None + _logger = logging.getLogger('apscheduler.executors') + + def __init__(self): + super(BaseExecutor, self).__init__() + self._instances = defaultdict(lambda: 0) + + def start(self, scheduler, alias): + """ + Called by the scheduler when the scheduler is 
being started or when the executor is being + added to an already running scheduler. + + :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting + this executor + :param str|unicode alias: alias of this executor as it was assigned to the scheduler + + """ + self._scheduler = scheduler + self._lock = scheduler._create_lock() + self._logger = logging.getLogger('apscheduler.executors.%s' % alias) + + def shutdown(self, wait=True): + """ + Shuts down this executor. + + :param bool wait: ``True`` to wait until all submitted jobs + have been executed + """ + + def submit_job(self, job, run_times): + """ + Submits job for execution. + + :param Job job: job to execute + :param list[datetime] run_times: list of datetimes specifying + when the job should have been run + :raises MaxInstancesReachedError: if the maximum number of + allowed instances for this job has been reached + + """ + assert self._lock is not None, 'This executor has not been started yet' + with self._lock: + if self._instances[job.id] >= job.max_instances: + raise MaxInstancesReachedError(job) + + self._do_submit_job(job, run_times) + self._instances[job.id] += 1 + + @abstractmethod + def _do_submit_job(self, job, run_times): + """Performs the actual task of scheduling `run_job` to be called.""" + + def _run_job_success(self, job_id, events): + """ + Called by the executor with the list of generated events when :func:`run_job` has been + successfully called. + + """ + with self._lock: + self._instances[job_id] -= 1 + if self._instances[job_id] == 0: + del self._instances[job_id] + + for event in events: + self._scheduler._dispatch_event(event) + + def _run_job_error(self, job_id, exc, traceback=None): + """Called by the executor with the exception if there is an error calling `run_job`.""" + with self._lock: + self._instances[job_id] -= 1 + if self._instances[job_id] == 0: + del self._instances[job_id] + + exc_info = (exc.__class__, exc, traceback) + self._logger.error('Error running job %s', job_id, exc_info=exc_info) + + +def run_job(job, jobstore_alias, run_times, logger_name): + """ + Called by executors to run the job. Returns a list of scheduler events to be dispatched by the + scheduler. 
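In practice the instance accounting in submit_job() above surfaces through the max_instances job option: when MaxInstancesReachedError is raised, the scheduler logs it and emits EVENT_JOB_MAX_INSTANCES instead of starting another copy. A sketch, with a made-up callable and interval:

import time
from apscheduler.schedulers.background import BackgroundScheduler

def slow_task():
    time.sleep(60)  # still running when the next tick comes due

scheduler = BackgroundScheduler()
# A second due run while slow_task() is still executing trips the
# max_instances check in submit_job() rather than spawning a concurrent copy
scheduler.add_job(slow_task, 'interval', seconds=30, max_instances=1)
scheduler.start()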
+ + """ + events = [] + logger = logging.getLogger(logger_name) + for run_time in run_times: + # See if the job missed its run time window, and handle + # possible misfires accordingly + if job.misfire_grace_time is not None: + difference = datetime.now(utc) - run_time + grace_time = timedelta(seconds=job.misfire_grace_time) + if difference > grace_time: + events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, + run_time)) + logger.warning('Run time of job "%s" was missed by %s', job, difference) + continue + + logger.info('Running job "%s" (scheduled at %s)', job, run_time) + try: + retval = job.func(*job.args, **job.kwargs) + except: + exc, tb = sys.exc_info()[1:] + formatted_tb = ''.join(format_tb(tb)) + events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, + exception=exc, traceback=formatted_tb)) + logger.exception('Job "%s" raised an exception', job) + else: + events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, + retval=retval)) + logger.info('Job "%s" executed successfully', job) + + return events diff --git a/lib/apscheduler/executors/base_py3.py b/lib/apscheduler/executors/base_py3.py new file mode 100644 index 00000000..47124258 --- /dev/null +++ b/lib/apscheduler/executors/base_py3.py @@ -0,0 +1,41 @@ +import logging +import sys +from datetime import datetime, timedelta +from traceback import format_tb + +from pytz import utc + +from apscheduler.events import ( + JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED) + + +async def run_coroutine_job(job, jobstore_alias, run_times, logger_name): + """Coroutine version of run_job().""" + events = [] + logger = logging.getLogger(logger_name) + for run_time in run_times: + # See if the job missed its run time window, and handle possible misfires accordingly + if job.misfire_grace_time is not None: + difference = datetime.now(utc) - run_time + grace_time = timedelta(seconds=job.misfire_grace_time) + if difference > grace_time: + events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, + run_time)) + logger.warning('Run time of job "%s" was missed by %s', job, difference) + continue + + logger.info('Running job "%s" (scheduled at %s)', job, run_time) + try: + retval = await job.func(*job.args, **job.kwargs) + except: + exc, tb = sys.exc_info()[1:] + formatted_tb = ''.join(format_tb(tb)) + events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, + exception=exc, traceback=formatted_tb)) + logger.exception('Job "%s" raised an exception', job) + else: + events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, + retval=retval)) + logger.info('Job "%s" executed successfully', job) + + return events diff --git a/lib/apscheduler/executors/debug.py b/lib/apscheduler/executors/debug.py new file mode 100644 index 00000000..f6454d52 --- /dev/null +++ b/lib/apscheduler/executors/debug.py @@ -0,0 +1,20 @@ +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + + +class DebugExecutor(BaseExecutor): + """ + A special executor that executes the target callable directly instead of deferring it to a + thread or process. 
+ + Plugin alias: ``debug`` + """ + + def _do_submit_job(self, job, run_times): + try: + events = run_job(job, job._jobstore_alias, run_times, self._logger.name) + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) diff --git a/lib/apscheduler/executors/gevent.py b/lib/apscheduler/executors/gevent.py new file mode 100644 index 00000000..a12b806a --- /dev/null +++ b/lib/apscheduler/executors/gevent.py @@ -0,0 +1,30 @@ +from __future__ import absolute_import +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + + +try: + import gevent +except ImportError: # pragma: nocover + raise ImportError('GeventExecutor requires gevent installed') + + +class GeventExecutor(BaseExecutor): + """ + Runs jobs as greenlets. + + Plugin alias: ``gevent`` + """ + + def _do_submit_job(self, job, run_times): + def callback(greenlet): + try: + events = greenlet.get() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\ + link(callback) diff --git a/lib/apscheduler/executors/pool.py b/lib/apscheduler/executors/pool.py new file mode 100644 index 00000000..2f4ef455 --- /dev/null +++ b/lib/apscheduler/executors/pool.py @@ -0,0 +1,54 @@ +from abc import abstractmethod +import concurrent.futures + +from apscheduler.executors.base import BaseExecutor, run_job + + +class BasePoolExecutor(BaseExecutor): + @abstractmethod + def __init__(self, pool): + super(BasePoolExecutor, self).__init__() + self._pool = pool + + def _do_submit_job(self, job, run_times): + def callback(f): + exc, tb = (f.exception_info() if hasattr(f, 'exception_info') else + (f.exception(), getattr(f.exception(), '__traceback__', None))) + if exc: + self._run_job_error(job.id, exc, tb) + else: + self._run_job_success(job.id, f.result()) + + f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name) + f.add_done_callback(callback) + + def shutdown(self, wait=True): + self._pool.shutdown(wait) + + +class ThreadPoolExecutor(BasePoolExecutor): + """ + An executor that runs jobs in a concurrent.futures thread pool. + + Plugin alias: ``threadpool`` + + :param max_workers: the maximum number of spawned threads. + """ + + def __init__(self, max_workers=10): + pool = concurrent.futures.ThreadPoolExecutor(int(max_workers)) + super(ThreadPoolExecutor, self).__init__(pool) + + +class ProcessPoolExecutor(BasePoolExecutor): + """ + An executor that runs jobs in a concurrent.futures process pool. + + Plugin alias: ``processpool`` + + :param max_workers: the maximum number of spawned processes. 
+ """ + + def __init__(self, max_workers=10): + pool = concurrent.futures.ProcessPoolExecutor(int(max_workers)) + super(ProcessPoolExecutor, self).__init__(pool) diff --git a/lib/apscheduler/executors/tornado.py b/lib/apscheduler/executors/tornado.py new file mode 100644 index 00000000..6519c3e8 --- /dev/null +++ b/lib/apscheduler/executors/tornado.py @@ -0,0 +1,54 @@ +from __future__ import absolute_import + +import sys +from concurrent.futures import ThreadPoolExecutor + +from tornado.gen import convert_yielded + +from apscheduler.executors.base import BaseExecutor, run_job + +try: + from inspect import iscoroutinefunction + from apscheduler.executors.base_py3 import run_coroutine_job +except ImportError: + def iscoroutinefunction(func): + return False + + +class TornadoExecutor(BaseExecutor): + """ + Runs jobs either in a thread pool or directly on the I/O loop. + + If the job function is a native coroutine function, it is scheduled to be run directly in the + I/O loop as soon as possible. All other functions are run in a thread pool. + + Plugin alias: ``tornado`` + + :param int max_workers: maximum number of worker threads in the thread pool + """ + + def __init__(self, max_workers=10): + super(TornadoExecutor, self).__init__() + self.executor = ThreadPoolExecutor(max_workers) + + def start(self, scheduler, alias): + super(TornadoExecutor, self).start(scheduler, alias) + self._ioloop = scheduler._ioloop + + def _do_submit_job(self, job, run_times): + def callback(f): + try: + events = f.result() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + if iscoroutinefunction(job.func): + f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name) + else: + f = self.executor.submit(run_job, job, job._jobstore_alias, run_times, + self._logger.name) + + f = convert_yielded(f) + f.add_done_callback(callback) diff --git a/lib/apscheduler/executors/twisted.py b/lib/apscheduler/executors/twisted.py new file mode 100644 index 00000000..c7bcf647 --- /dev/null +++ b/lib/apscheduler/executors/twisted.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import + +from apscheduler.executors.base import BaseExecutor, run_job + + +class TwistedExecutor(BaseExecutor): + """ + Runs jobs in the reactor's thread pool. + + Plugin alias: ``twisted`` + """ + + def start(self, scheduler, alias): + super(TwistedExecutor, self).start(scheduler, alias) + self._reactor = scheduler._reactor + + def _do_submit_job(self, job, run_times): + def callback(success, result): + if success: + self._run_job_success(job.id, result) + else: + self._run_job_error(job.id, result.value, result.tb) + + self._reactor.getThreadPool().callInThreadWithCallback( + callback, run_job, job, job._jobstore_alias, run_times, self._logger.name) diff --git a/lib/apscheduler/job.py b/lib/apscheduler/job.py index 868e7234..b9c305db 100644 --- a/lib/apscheduler/job.py +++ b/lib/apscheduler/job.py @@ -1,134 +1,289 @@ -""" -Jobs represent scheduled tasks. 
-""" +from collections import Iterable, Mapping +from uuid import uuid4 -from threading import Lock -from datetime import timedelta +import six -from apscheduler.util import to_unicode, ref_to_obj, get_callable_name,\ - obj_to_ref - - -class MaxInstancesReachedError(Exception): - pass +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import ( + ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args, + convert_to_datetime) class Job(object): """ - Encapsulates the actual Job along with its metadata. Job instances - are created by the scheduler when adding jobs, and it should not be - directly instantiated. + Contains the options given when scheduling callables and its current schedule and other state. + This class should never be instantiated by the user. - :param trigger: trigger that determines the execution times - :param func: callable to call when the trigger is triggered - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job (optional) - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :param coalesce: run once instead of many times if the scheduler determines - that the job should be run more than once in succession - :param max_runs: maximum number of times this job is allowed to be - triggered - :param max_instances: maximum number of concurrently running - instances allowed for this job + :var str id: the unique identifier of this job + :var str name: the description of this job + :var func: the callable to execute + :var tuple|list args: positional arguments to the callable + :var dict kwargs: keyword arguments to the callable + :var bool coalesce: whether to only run the job once when several run times are due + :var trigger: the trigger object that controls the schedule of this job + :var str executor: the name of the executor that will run this job + :var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to + be late + :var int max_instances: the maximum number of concurrently executing instances allowed for this + job + :var datetime.datetime next_run_time: the next scheduled run time of this job + + .. note:: + The ``misfire_grace_time`` has some non-obvious effects on job execution. See the + :ref:`missed-job-executions` section in the documentation for an in-depth explanation. 
""" - id = None - next_run_time = None - def __init__(self, trigger, func, args, kwargs, misfire_grace_time, - coalesce, name=None, max_runs=None, max_instances=1): - if not trigger: - raise ValueError('The trigger must not be None') - if not hasattr(func, '__call__'): - raise TypeError('func must be callable') - if not hasattr(args, '__getitem__'): - raise TypeError('args must be a list-like object') - if not hasattr(kwargs, '__getitem__'): - raise TypeError('kwargs must be a dict-like object') - if misfire_grace_time <= 0: - raise ValueError('misfire_grace_time must be a positive value') - if max_runs is not None and max_runs <= 0: - raise ValueError('max_runs must be a positive value') - if max_instances <= 0: - raise ValueError('max_instances must be a positive value') + __slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref', + 'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances', + 'next_run_time') - self._lock = Lock() + def __init__(self, scheduler, id=None, **kwargs): + super(Job, self).__init__() + self._scheduler = scheduler + self._jobstore_alias = None + self._modify(id=id or uuid4().hex, **kwargs) - self.trigger = trigger - self.func = func - self.args = args - self.kwargs = kwargs - self.name = to_unicode(name or get_callable_name(func)) - self.misfire_grace_time = misfire_grace_time - self.coalesce = coalesce - self.max_runs = max_runs - self.max_instances = max_instances - self.runs = 0 - self.instances = 0 - - def compute_next_run_time(self, now): - if self.runs == self.max_runs: - self.next_run_time = None - else: - self.next_run_time = self.trigger.get_next_fire_time(now) - - return self.next_run_time - - def get_run_times(self, now): + def modify(self, **changes): """ - Computes the scheduled run times between ``next_run_time`` and ``now``. + Makes the given changes to this job and saves it in the associated job store. + + Accepted keyword arguments are the same as the variables on this class. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job` + + :return Job: this job instance + + """ + self._scheduler.modify_job(self.id, self._jobstore_alias, **changes) + return self + + def reschedule(self, trigger, **trigger_args): + """ + Shortcut for switching the trigger on this job. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job` + + :return Job: this job instance + + """ + self._scheduler.reschedule_job(self.id, self._jobstore_alias, trigger, **trigger_args) + return self + + def pause(self): + """ + Temporarily suspend the execution of this job. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job` + + :return Job: this job instance + + """ + self._scheduler.pause_job(self.id, self._jobstore_alias) + return self + + def resume(self): + """ + Resume the schedule of this job if previously paused. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job` + + :return Job: this job instance + + """ + self._scheduler.resume_job(self.id, self._jobstore_alias) + return self + + def remove(self): + """ + Unschedules this job and removes it from its associated job store. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job` + + """ + self._scheduler.remove_job(self.id, self._jobstore_alias) + + @property + def pending(self): + """ + Returns ``True`` if the referenced job is still waiting to be added to its designated job + store. 
+ + """ + return self._jobstore_alias is None + + # + # Private API + # + + def _get_run_times(self, now): + """ + Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive). + + :type now: datetime.datetime + :rtype: list[datetime.datetime] + """ run_times = [] - run_time = self.next_run_time - increment = timedelta(microseconds=1) - while ((not self.max_runs or self.runs < self.max_runs) and - run_time and run_time <= now): - run_times.append(run_time) - run_time = self.trigger.get_next_fire_time(run_time + increment) + next_run_time = self.next_run_time + while next_run_time and next_run_time <= now: + run_times.append(next_run_time) + next_run_time = self.trigger.get_next_fire_time(next_run_time, now) return run_times - def add_instance(self): - self._lock.acquire() - try: - if self.instances == self.max_instances: - raise MaxInstancesReachedError - self.instances += 1 - finally: - self._lock.release() + def _modify(self, **changes): + """ + Validates the changes to the Job and makes the modifications if and only if all of them + validate. - def remove_instance(self): - self._lock.acquire() - try: - assert self.instances > 0, 'Already at 0 instances' - self.instances -= 1 - finally: - self._lock.release() + """ + approved = {} + + if 'id' in changes: + value = changes.pop('id') + if not isinstance(value, six.string_types): + raise TypeError("id must be a nonempty string") + if hasattr(self, 'id'): + raise ValueError('The job ID may not be changed') + approved['id'] = value + + if 'func' in changes or 'args' in changes or 'kwargs' in changes: + func = changes.pop('func') if 'func' in changes else self.func + args = changes.pop('args') if 'args' in changes else self.args + kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs + + if isinstance(func, six.string_types): + func_ref = func + func = ref_to_obj(func) + elif callable(func): + try: + func_ref = obj_to_ref(func) + except ValueError: + # If this happens, this Job won't be serializable + func_ref = None + else: + raise TypeError('func must be a callable or a textual reference to one') + + if not hasattr(self, 'name') and changes.get('name', None) is None: + changes['name'] = get_callable_name(func) + + if isinstance(args, six.string_types) or not isinstance(args, Iterable): + raise TypeError('args must be a non-string iterable') + if isinstance(kwargs, six.string_types) or not isinstance(kwargs, Mapping): + raise TypeError('kwargs must be a dict-like object') + + check_callable_args(func, args, kwargs) + + approved['func'] = func + approved['func_ref'] = func_ref + approved['args'] = args + approved['kwargs'] = kwargs + + if 'name' in changes: + value = changes.pop('name') + if not value or not isinstance(value, six.string_types): + raise TypeError("name must be a nonempty string") + approved['name'] = value + + if 'misfire_grace_time' in changes: + value = changes.pop('misfire_grace_time') + if value is not None and (not isinstance(value, six.integer_types) or value <= 0): + raise TypeError('misfire_grace_time must be either None or a positive integer') + approved['misfire_grace_time'] = value + + if 'coalesce' in changes: + value = bool(changes.pop('coalesce')) + approved['coalesce'] = value + + if 'max_instances' in changes: + value = changes.pop('max_instances') + if not isinstance(value, six.integer_types) or value <= 0: + raise TypeError('max_instances must be a positive integer') + approved['max_instances'] = value + + if 'trigger' in changes: + trigger = changes.pop('trigger') + if 
not isinstance(trigger, BaseTrigger): + raise TypeError('Expected a trigger instance, got %s instead' % + trigger.__class__.__name__) + + approved['trigger'] = trigger + + if 'executor' in changes: + value = changes.pop('executor') + if not isinstance(value, six.string_types): + raise TypeError('executor must be a string') + approved['executor'] = value + + if 'next_run_time' in changes: + value = changes.pop('next_run_time') + approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone, + 'next_run_time') + + if changes: + raise AttributeError('The following are not modifiable attributes of Job: %s' % + ', '.join(changes)) + + for key, value in six.iteritems(approved): + setattr(self, key, value) def __getstate__(self): - # Prevents the unwanted pickling of transient or unpicklable variables - state = self.__dict__.copy() - state.pop('instances', None) - state.pop('func', None) - state.pop('_lock', None) - state['func_ref'] = obj_to_ref(self.func) - return state + # Don't allow this Job to be serialized if the function reference could not be determined + if not self.func_ref: + raise ValueError( + 'This Job cannot be serialized since the reference to its callable (%r) could not ' + 'be determined. Consider giving a textual reference (module:function name) ' + 'instead.' % (self.func,)) + + return { + 'version': 1, + 'id': self.id, + 'func': self.func_ref, + 'trigger': self.trigger, + 'executor': self.executor, + 'args': self.args, + 'kwargs': self.kwargs, + 'name': self.name, + 'misfire_grace_time': self.misfire_grace_time, + 'coalesce': self.coalesce, + 'max_instances': self.max_instances, + 'next_run_time': self.next_run_time + } def __setstate__(self, state): - state['instances'] = 0 - state['func'] = ref_to_obj(state.pop('func_ref')) - state['_lock'] = Lock() - self.__dict__ = state + if state.get('version', 1) > 1: + raise ValueError('Job has version %s, but only version 1 can be handled' % + state['version']) + + self.id = state['id'] + self.func_ref = state['func'] + self.func = ref_to_obj(self.func_ref) + self.trigger = state['trigger'] + self.executor = state['executor'] + self.args = state['args'] + self.kwargs = state['kwargs'] + self.name = state['name'] + self.misfire_grace_time = state['misfire_grace_time'] + self.coalesce = state['coalesce'] + self.max_instances = state['max_instances'] + self.next_run_time = state['next_run_time'] def __eq__(self, other): if isinstance(other, Job): - return self.id is not None and other.id == self.id or self is other + return self.id == other.id return NotImplemented def __repr__(self): - return '' % (self.name, repr(self.trigger)) + return '' % (repr_escape(self.id), repr_escape(self.name)) def __str__(self): - return '%s (trigger: %s, next run at: %s)' % (self.name, - str(self.trigger), str(self.next_run_time)) + return repr_escape(self.__unicode__()) + + def __unicode__(self): + if hasattr(self, 'next_run_time'): + status = ('next run at: ' + datetime_repr(self.next_run_time) if + self.next_run_time else 'paused') + else: + status = 'pending' + + return u'%s (trigger: %s, %s)' % (self.name, self.trigger, status) diff --git a/lib/apscheduler/jobstores/base.py b/lib/apscheduler/jobstores/base.py index f0a16ddb..9cff66c4 100644 --- a/lib/apscheduler/jobstores/base.py +++ b/lib/apscheduler/jobstores/base.py @@ -1,25 +1,143 @@ -""" -Abstract base class that provides the interface needed by all job stores. -Job store methods are also documented here. 
-""" +from abc import ABCMeta, abstractmethod +import logging + +import six -class JobStore(object): - def add_job(self, job): - """Adds the given job from this store.""" - raise NotImplementedError +class JobLookupError(KeyError): + """Raised when the job store cannot find a job for update or removal.""" - def update_job(self, job): - """Persists the running state of the given job.""" - raise NotImplementedError + def __init__(self, job_id): + super(JobLookupError, self).__init__(u'No job by the id of %s was found' % job_id) - def remove_job(self, job): - """Removes the given jobs from this store.""" - raise NotImplementedError - def load_jobs(self): - """Loads jobs from this store into memory.""" - raise NotImplementedError +class ConflictingIdError(KeyError): + """Raised when the uniqueness of job IDs is being violated.""" - def close(self): + def __init__(self, job_id): + super(ConflictingIdError, self).__init__( + u'Job identifier (%s) conflicts with an existing job' % job_id) + + +class TransientJobError(ValueError): + """ + Raised when an attempt to add transient (with no func_ref) job to a persistent job store is + detected. + """ + + def __init__(self, job_id): + super(TransientJobError, self).__init__( + u'Job (%s) cannot be added to this job store because a reference to the callable ' + u'could not be determined.' % job_id) + + +class BaseJobStore(six.with_metaclass(ABCMeta)): + """Abstract base class that defines the interface that every job store must implement.""" + + _scheduler = None + _alias = None + _logger = logging.getLogger('apscheduler.jobstores') + + def start(self, scheduler, alias): + """ + Called by the scheduler when the scheduler is being started or when the job store is being + added to an already running scheduler. + + :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting + this job store + :param str|unicode alias: alias of this job store as it was assigned to the scheduler + """ + + self._scheduler = scheduler + self._alias = alias + self._logger = logging.getLogger('apscheduler.jobstores.%s' % alias) + + def shutdown(self): """Frees any resources still bound to this job store.""" + + def _fix_paused_jobs_sorting(self, jobs): + for i, job in enumerate(jobs): + if job.next_run_time is not None: + if i > 0: + paused_jobs = jobs[:i] + del jobs[:i] + jobs.extend(paused_jobs) + break + + @abstractmethod + def lookup_job(self, job_id): + """ + Returns a specific job, or ``None`` if it isn't found.. + + The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of + the returned job to point to the scheduler and itself, respectively. + + :param str|unicode job_id: identifier of the job + :rtype: Job + """ + + @abstractmethod + def get_due_jobs(self, now): + """ + Returns the list of jobs that have ``next_run_time`` earlier or equal to ``now``. + The returned jobs must be sorted by next run time (ascending). + + :param datetime.datetime now: the current (timezone aware) datetime + :rtype: list[Job] + """ + + @abstractmethod + def get_next_run_time(self): + """ + Returns the earliest run time of all the jobs stored in this job store, or ``None`` if + there are no active jobs. + + :rtype: datetime.datetime + """ + + @abstractmethod + def get_all_jobs(self): + """ + Returns a list of all jobs in this job store. + The returned jobs should be sorted by next run time (ascending). + Paused jobs (next_run_time == None) should be sorted last. 
+ + The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of + the returned jobs to point to the scheduler and itself, respectively. + + :rtype: list[Job] + """ + + @abstractmethod + def add_job(self, job): + """ + Adds the given job to this store. + + :param Job job: the job to add + :raises ConflictingIdError: if there is another job in this store with the same ID + """ + + @abstractmethod + def update_job(self, job): + """ + Replaces the job in the store with the given newer version. + + :param Job job: the job to update + :raises JobLookupError: if the job does not exist + """ + + @abstractmethod + def remove_job(self, job_id): + """ + Removes the given job from this store. + + :param str|unicode job_id: identifier of the job + :raises JobLookupError: if the job does not exist + """ + + @abstractmethod + def remove_all_jobs(self): + """Removes all jobs from this store.""" + + def __repr__(self): + return '<%s>' % self.__class__.__name__ diff --git a/lib/apscheduler/jobstores/memory.py b/lib/apscheduler/jobstores/memory.py new file mode 100644 index 00000000..abfe7c6c --- /dev/null +++ b/lib/apscheduler/jobstores/memory.py @@ -0,0 +1,108 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import datetime_to_utc_timestamp + + +class MemoryJobStore(BaseJobStore): + """ + Stores jobs in an array in RAM. Provides no persistence support. + + Plugin alias: ``memory`` + """ + + def __init__(self): + super(MemoryJobStore, self).__init__() + # list of (job, timestamp), sorted by next_run_time and job id (ascending) + self._jobs = [] + self._jobs_index = {} # id -> (job, timestamp) lookup table + + def lookup_job(self, job_id): + return self._jobs_index.get(job_id, (None, None))[0] + + def get_due_jobs(self, now): + now_timestamp = datetime_to_utc_timestamp(now) + pending = [] + for job, timestamp in self._jobs: + if timestamp is None or timestamp > now_timestamp: + break + pending.append(job) + + return pending + + def get_next_run_time(self): + return self._jobs[0][0].next_run_time if self._jobs else None + + def get_all_jobs(self): + return [j[0] for j in self._jobs] + + def add_job(self, job): + if job.id in self._jobs_index: + raise ConflictingIdError(job.id) + + timestamp = datetime_to_utc_timestamp(job.next_run_time) + index = self._get_job_index(timestamp, job.id) + self._jobs.insert(index, (job, timestamp)) + self._jobs_index[job.id] = (job, timestamp) + + def update_job(self, job): + old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) + if old_job is None: + raise JobLookupError(job.id) + + # If the next run time has not changed, simply replace the job in its present index. + # Otherwise, reinsert the job to the list to preserve the ordering. 
+ old_index = self._get_job_index(old_timestamp, old_job.id) + new_timestamp = datetime_to_utc_timestamp(job.next_run_time) + if old_timestamp == new_timestamp: + self._jobs[old_index] = (job, new_timestamp) + else: + del self._jobs[old_index] + new_index = self._get_job_index(new_timestamp, job.id) + self._jobs.insert(new_index, (job, new_timestamp)) + + self._jobs_index[old_job.id] = (job, new_timestamp) + + def remove_job(self, job_id): + job, timestamp = self._jobs_index.get(job_id, (None, None)) + if job is None: + raise JobLookupError(job_id) + + index = self._get_job_index(timestamp, job_id) + del self._jobs[index] + del self._jobs_index[job.id] + + def remove_all_jobs(self): + self._jobs = [] + self._jobs_index = {} + + def shutdown(self): + self.remove_all_jobs() + + def _get_job_index(self, timestamp, job_id): + """ + Returns the index of the given job, or if it's not found, the index where the job should be + inserted based on the given timestamp. + + :type timestamp: int + :type job_id: str + + """ + lo, hi = 0, len(self._jobs) + timestamp = float('inf') if timestamp is None else timestamp + while lo < hi: + mid = (lo + hi) // 2 + mid_job, mid_timestamp = self._jobs[mid] + mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp + if mid_timestamp > timestamp: + hi = mid + elif mid_timestamp < timestamp: + lo = mid + 1 + elif mid_job.id > job_id: + hi = mid + elif mid_job.id < job_id: + lo = mid + 1 + else: + return mid + + return lo diff --git a/lib/apscheduler/jobstores/mongodb.py b/lib/apscheduler/jobstores/mongodb.py new file mode 100644 index 00000000..fc88325f --- /dev/null +++ b/lib/apscheduler/jobstores/mongodb.py @@ -0,0 +1,141 @@ +from __future__ import absolute_import +import warnings + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from bson.binary import Binary + from pymongo.errors import DuplicateKeyError + from pymongo import MongoClient, ASCENDING +except ImportError: # pragma: nocover + raise ImportError('MongoDBJobStore requires PyMongo installed') + + +class MongoDBJobStore(BaseJobStore): + """ + Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to + pymongo's `MongoClient + `_. 
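Typical wiring for this store; the database and collection names here are illustrative, and sync_task is a placeholder callable:

from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.schedulers.background import BackgroundScheduler

def sync_task():  # placeholder
    pass

jobstores = {'mongo': MongoDBJobStore(database='apscheduler', collection='jobs')}
scheduler = BackgroundScheduler(jobstores=jobstores)
scheduler.add_job(sync_task, 'interval', hours=1, jobstore='mongo')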
+ + Plugin alias: ``mongodb`` + + :param str database: database to store jobs in + :param str collection: collection to store jobs in + :param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of + providing connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, database='apscheduler', collection='jobs', client=None, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(MongoDBJobStore, self).__init__() + self.pickle_protocol = pickle_protocol + + if not database: + raise ValueError('The "database" parameter must not be empty') + if not collection: + raise ValueError('The "collection" parameter must not be empty') + + if client: + self.client = maybe_ref(client) + else: + connect_args.setdefault('w', 1) + self.client = MongoClient(**connect_args) + + self.collection = self.client[database][collection] + + def start(self, scheduler, alias): + super(MongoDBJobStore, self).start(scheduler, alias) + self.collection.ensure_index('next_run_time', sparse=True) + + @property + def connection(self): + warnings.warn('The "connection" member is deprecated -- use "client" instead', + DeprecationWarning) + return self.client + + def lookup_job(self, job_id): + document = self.collection.find_one(job_id, ['job_state']) + return self._reconstitute_job(document['job_state']) if document else None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + return self._get_jobs({'next_run_time': {'$lte': timestamp}}) + + def get_next_run_time(self): + document = self.collection.find_one({'next_run_time': {'$ne': None}}, + projection=['next_run_time'], + sort=[('next_run_time', ASCENDING)]) + return utc_timestamp_to_datetime(document['next_run_time']) if document else None + + def get_all_jobs(self): + jobs = self._get_jobs({}) + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + try: + self.collection.insert({ + '_id': job.id, + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + }) + except DuplicateKeyError: + raise ConflictingIdError(job.id) + + def update_job(self, job): + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + result = self.collection.update({'_id': job.id}, {'$set': changes}) + if result and result['n'] == 0: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + result = self.collection.remove(job_id) + if result and result['n'] == 0: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + self.collection.remove() + + def shutdown(self): + self.client.close() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self, conditions): + jobs = [] + failed_job_ids = [] + for document in self.collection.find(conditions, ['_id', 'job_state'], + sort=[('next_run_time', ASCENDING)]): + try: + jobs.append(self._reconstitute_job(document['job_state'])) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', + document['_id']) + failed_job_ids.append(document['_id']) + + # Remove all the jobs we failed to restore + if failed_job_ids: + self.collection.remove({'_id': {'$in': 
failed_job_ids}}) + + return jobs + + def __repr__(self): + return '<%s (client=%s)>' % (self.__class__.__name__, self.client) diff --git a/lib/apscheduler/jobstores/mongodb_store.py b/lib/apscheduler/jobstores/mongodb_store.py deleted file mode 100644 index 3f522c25..00000000 --- a/lib/apscheduler/jobstores/mongodb_store.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Stores jobs in a MongoDB database. -""" -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job - -try: - import cPickle as pickle -except ImportError: # pragma: nocover - import pickle - -try: - from bson.binary import Binary - from pymongo.connection import Connection -except ImportError: # pragma: nocover - raise ImportError('MongoDBJobStore requires PyMongo installed') - -logger = logging.getLogger(__name__) - - -class MongoDBJobStore(JobStore): - def __init__(self, database='apscheduler', collection='jobs', - connection=None, pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args): - self.jobs = [] - self.pickle_protocol = pickle_protocol - - if not database: - raise ValueError('The "database" parameter must not be empty') - if not collection: - raise ValueError('The "collection" parameter must not be empty') - - if connection: - self.connection = connection - else: - self.connection = Connection(**connect_args) - - self.collection = self.connection[database][collection] - - def add_job(self, job): - job_dict = job.__getstate__() - job_dict['trigger'] = Binary(pickle.dumps(job.trigger, - self.pickle_protocol)) - job_dict['args'] = Binary(pickle.dumps(job.args, - self.pickle_protocol)) - job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs, - self.pickle_protocol)) - job.id = self.collection.insert(job_dict) - self.jobs.append(job) - - def remove_job(self, job): - self.collection.remove(job.id) - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for job_dict in self.collection.find(): - try: - job = Job.__new__(Job) - job_dict['id'] = job_dict.pop('_id') - job_dict['trigger'] = pickle.loads(job_dict['trigger']) - job_dict['args'] = pickle.loads(job_dict['args']) - job_dict['kwargs'] = pickle.loads(job_dict['kwargs']) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - self.jobs = jobs - - def update_job(self, job): - spec = {'_id': job.id} - document = {'$set': {'next_run_time': job.next_run_time}, - '$inc': {'runs': 1}} - self.collection.update(spec, document) - - def close(self): - self.connection.disconnect() - - def __repr__(self): - connection = self.collection.database.connection - return '<%s (connection=%s)>' % (self.__class__.__name__, connection) diff --git a/lib/apscheduler/jobstores/ram_store.py b/lib/apscheduler/jobstores/ram_store.py deleted file mode 100644 index 85091fe8..00000000 --- a/lib/apscheduler/jobstores/ram_store.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Stores jobs in an array in RAM. Provides no persistence support. 
-""" - -from apscheduler.jobstores.base import JobStore - - -class RAMJobStore(JobStore): - def __init__(self): - self.jobs = [] - - def add_job(self, job): - self.jobs.append(job) - - def update_job(self, job): - pass - - def remove_job(self, job): - self.jobs.remove(job) - - def load_jobs(self): - pass - - def __repr__(self): - return '<%s>' % (self.__class__.__name__) diff --git a/lib/apscheduler/jobstores/redis.py b/lib/apscheduler/jobstores/redis.py new file mode 100644 index 00000000..4e092f7d --- /dev/null +++ b/lib/apscheduler/jobstores/redis.py @@ -0,0 +1,146 @@ +from __future__ import absolute_import +from datetime import datetime + +from pytz import utc +import six + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from redis import StrictRedis +except ImportError: # pragma: nocover + raise ImportError('RedisJobStore requires redis installed') + + +class RedisJobStore(BaseJobStore): + """ + Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's + :class:`~redis.StrictRedis`. + + Plugin alias: ``redis`` + + :param int db: the database number to store jobs in + :param str jobs_key: key to store jobs in + :param str run_times_key: key to store the jobs' run times in + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, db=0, jobs_key='apscheduler.jobs', run_times_key='apscheduler.run_times', + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(RedisJobStore, self).__init__() + + if db is None: + raise ValueError('The "db" parameter must not be empty') + if not jobs_key: + raise ValueError('The "jobs_key" parameter must not be empty') + if not run_times_key: + raise ValueError('The "run_times_key" parameter must not be empty') + + self.pickle_protocol = pickle_protocol + self.jobs_key = jobs_key + self.run_times_key = run_times_key + self.redis = StrictRedis(db=int(db), **connect_args) + + def lookup_job(self, job_id): + job_state = self.redis.hget(self.jobs_key, job_id) + return self._reconstitute_job(job_state) if job_state else None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp) + if job_ids: + job_states = self.redis.hmget(self.jobs_key, *job_ids) + return self._reconstitute_jobs(six.moves.zip(job_ids, job_states)) + return [] + + def get_next_run_time(self): + next_run_time = self.redis.zrange(self.run_times_key, 0, 0, withscores=True) + if next_run_time: + return utc_timestamp_to_datetime(next_run_time[0][1]) + + def get_all_jobs(self): + job_states = self.redis.hgetall(self.jobs_key) + jobs = self._reconstitute_jobs(six.iteritems(job_states)) + paused_sort_key = datetime(9999, 12, 31, tzinfo=utc) + return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key) + + def add_job(self, job): + if self.redis.hexists(self.jobs_key, job.id): + raise ConflictingIdError(job.id) + + with self.redis.pipeline() as pipe: + pipe.multi() + pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), + self.pickle_protocol)) + if job.next_run_time: + pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id) + pipe.execute() + + def update_job(self, job): + 
if not self.redis.hexists(self.jobs_key, job.id): + raise JobLookupError(job.id) + + with self.redis.pipeline() as pipe: + pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), + self.pickle_protocol)) + if job.next_run_time: + pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id) + else: + pipe.zrem(self.run_times_key, job.id) + pipe.execute() + + def remove_job(self, job_id): + if not self.redis.hexists(self.jobs_key, job_id): + raise JobLookupError(job_id) + + with self.redis.pipeline() as pipe: + pipe.hdel(self.jobs_key, job_id) + pipe.zrem(self.run_times_key, job_id) + pipe.execute() + + def remove_all_jobs(self): + with self.redis.pipeline() as pipe: + pipe.delete(self.jobs_key) + pipe.delete(self.run_times_key) + pipe.execute() + + def shutdown(self): + self.redis.connection_pool.disconnect() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _reconstitute_jobs(self, job_states): + jobs = [] + failed_job_ids = [] + for job_id, job_state in job_states: + try: + jobs.append(self._reconstitute_job(job_state)) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', job_id) + failed_job_ids.append(job_id) + + # Remove all the jobs we failed to restore + if failed_job_ids: + with self.redis.pipeline() as pipe: + pipe.hdel(self.jobs_key, *failed_job_ids) + pipe.zrem(self.run_times_key, *failed_job_ids) + pipe.execute() + + return jobs + + def __repr__(self): + return '<%s>' % self.__class__.__name__ diff --git a/lib/apscheduler/jobstores/rethinkdb.py b/lib/apscheduler/jobstores/rethinkdb.py new file mode 100644 index 00000000..2185c6cc --- /dev/null +++ b/lib/apscheduler/jobstores/rethinkdb.py @@ -0,0 +1,153 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + import rethinkdb as r +except ImportError: # pragma: nocover + raise ImportError('RethinkDBJobStore requires rethinkdb installed') + + +class RethinkDBJobStore(BaseJobStore): + """ + Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to + rethinkdb's `RethinkdbClient `_. 
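Typical wiring for the RethinkDB store introduced here; the database and table names are illustrative:

from apscheduler.jobstores.rethinkdb import RethinkDBJobStore
from apscheduler.schedulers.background import BackgroundScheduler

jobstores = {'default': RethinkDBJobStore(database='apscheduler', table='jobs')}
scheduler = BackgroundScheduler(jobstores=jobstores)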
+ + Plugin alias: ``rethinkdb`` + + :param str database: database to store jobs in + :param str collection: collection to store jobs in + :param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing + connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, database='apscheduler', table='jobs', client=None, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(RethinkDBJobStore, self).__init__() + + if not database: + raise ValueError('The "database" parameter must not be empty') + if not table: + raise ValueError('The "table" parameter must not be empty') + + self.database = database + self.table = table + self.client = client + self.pickle_protocol = pickle_protocol + self.connect_args = connect_args + self.conn = None + + def start(self, scheduler, alias): + super(RethinkDBJobStore, self).start(scheduler, alias) + + if self.client: + self.conn = maybe_ref(self.client) + else: + self.conn = r.connect(db=self.database, **self.connect_args) + + if self.database not in r.db_list().run(self.conn): + r.db_create(self.database).run(self.conn) + + if self.table not in r.table_list().run(self.conn): + r.table_create(self.table).run(self.conn) + + if 'next_run_time' not in r.table(self.table).index_list().run(self.conn): + r.table(self.table).index_create('next_run_time').run(self.conn) + + self.table = r.db(self.database).table(self.table) + + def lookup_job(self, job_id): + results = list(self.table.get_all(job_id).pluck('job_state').run(self.conn)) + return self._reconstitute_job(results[0]['job_state']) if results else None + + def get_due_jobs(self, now): + return self._get_jobs(r.row['next_run_time'] <= datetime_to_utc_timestamp(now)) + + def get_next_run_time(self): + results = list( + self.table + .filter(r.row['next_run_time'] != None) # flake8: noqa + .order_by(r.asc('next_run_time')) + .map(lambda x: x['next_run_time']) + .limit(1) + .run(self.conn) + ) + return utc_timestamp_to_datetime(results[0]) if results else None + + def get_all_jobs(self): + jobs = self._get_jobs() + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + job_dict = { + 'id': job.id, + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + results = self.table.insert(job_dict).run(self.conn) + if results['errors'] > 0: + raise ConflictingIdError(job.id) + + def update_job(self, job): + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + results = self.table.get_all(job.id).update(changes).run(self.conn) + skipped = False in map(lambda x: results[x] == 0, results.keys()) + if results['skipped'] > 0 or results['errors'] > 0 or not skipped: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + results = self.table.get_all(job_id).delete().run(self.conn) + if results['deleted'] + results['skipped'] != 1: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + self.table.delete().run(self.conn) + + def shutdown(self): + self.conn.close() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self, predicate=None): + jobs = [] + failed_job_ids 
= [] + query = (self.table.filter(r.row['next_run_time'] != None).filter(predicate) if + predicate else self.table) + query = query.order_by('next_run_time', 'id').pluck('id', 'job_state') + + for document in query.run(self.conn): + try: + jobs.append(self._reconstitute_job(document['job_state'])) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', document['id']) + failed_job_ids.append(document['id']) + + # Remove all the jobs we failed to restore + if failed_job_ids: + r.expr(failed_job_ids).for_each( + lambda job_id: self.table.get_all(job_id).delete()).run(self.conn) + + return jobs + + def __repr__(self): + connection = self.conn + return '<%s (connection=%s)>' % (self.__class__.__name__, connection) diff --git a/lib/apscheduler/jobstores/shelve_store.py b/lib/apscheduler/jobstores/shelve_store.py deleted file mode 100644 index 87c95f8f..00000000 --- a/lib/apscheduler/jobstores/shelve_store.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Stores jobs in a file governed by the :mod:`shelve` module. -""" - -import shelve -import pickle -import random -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job -from apscheduler.util import itervalues - -logger = logging.getLogger(__name__) - - -class ShelveJobStore(JobStore): - MAX_ID = 1000000 - - def __init__(self, path, pickle_protocol=pickle.HIGHEST_PROTOCOL): - self.jobs = [] - self.path = path - self.pickle_protocol = pickle_protocol - self.store = shelve.open(path, 'c', self.pickle_protocol) - - def _generate_id(self): - id = None - while not id: - id = str(random.randint(1, self.MAX_ID)) - if not id in self.store: - return id - - def add_job(self, job): - job.id = self._generate_id() - self.jobs.append(job) - self.store[job.id] = job.__getstate__() - - def update_job(self, job): - job_dict = self.store[job.id] - job_dict['next_run_time'] = job.next_run_time - job_dict['runs'] = job.runs - self.store[job.id] = job_dict - - def remove_job(self, job): - del self.store[job.id] - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for job_dict in itervalues(self.store): - try: - job = Job.__new__(Job) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - - self.jobs = jobs - - def close(self): - self.store.close() - - def __repr__(self): - return '<%s (path=%s)>' % (self.__class__.__name__, self.path) diff --git a/lib/apscheduler/jobstores/sqlalchemy.py b/lib/apscheduler/jobstores/sqlalchemy.py new file mode 100644 index 00000000..b82696e2 --- /dev/null +++ b/lib/apscheduler/jobstores/sqlalchemy.py @@ -0,0 +1,148 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from sqlalchemy import ( + create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select) + from sqlalchemy.exc import IntegrityError + from sqlalchemy.sql.expression import null +except ImportError: # pragma: nocover + raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed') + + +class SQLAlchemyJobStore(BaseJobStore): + """ + Stores jobs in a database table using SQLAlchemy. + The table will be created if it doesn't exist in the database. 
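# Editorial sketch, not part of the patch: both job stores added here register a
# plugin alias (``rethinkdb`` above, ``sqlalchemy`` below), so a scheduler can be
# pointed at them without importing the store classes. Assumes the matching
# driver (the rethinkdb package / SQLAlchemy) is installed; names and URLs are
# illustrative only.
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.add_jobstore('sqlalchemy', url='sqlite:///jobs.sqlite')
scheduler.add_jobstore('rethinkdb', alias='rethink', database='apscheduler', table='jobs')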
+
+    Plugin alias: ``sqlalchemy``
+
+    :param str url: connection string (see `SQLAlchemy documentation `_
+        on this)
+    :param engine: an SQLAlchemy Engine to use instead of creating a new one based on ``url``
+    :param str tablename: name of the table to store jobs in
+    :param metadata: a :class:`~sqlalchemy.MetaData` instance to use instead of creating a new one
+    :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the
+        highest available
+    """
+
+    def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
+                 pickle_protocol=pickle.HIGHEST_PROTOCOL):
+        super(SQLAlchemyJobStore, self).__init__()
+        self.pickle_protocol = pickle_protocol
+        metadata = maybe_ref(metadata) or MetaData()
+
+        if engine:
+            self.engine = maybe_ref(engine)
+        elif url:
+            self.engine = create_engine(url)
+        else:
+            raise ValueError('Need either "engine" or "url" defined')
+
+        # 191 = max key length in MySQL for InnoDB/utf8mb4 tables,
+        # 25 = precision that translates to an 8-byte float
+        self.jobs_t = Table(
+            tablename, metadata,
+            Column('id', Unicode(191, _warn_on_bytestring=False), primary_key=True),
+            Column('next_run_time', Float(25), index=True),
+            Column('job_state', LargeBinary, nullable=False)
+        )
+
+    def start(self, scheduler, alias):
+        super(SQLAlchemyJobStore, self).start(scheduler, alias)
+        self.jobs_t.create(self.engine, True)
+
+    def lookup_job(self, job_id):
+        selectable = select([self.jobs_t.c.job_state]).where(self.jobs_t.c.id == job_id)
+        job_state = self.engine.execute(selectable).scalar()
+        return self._reconstitute_job(job_state) if job_state else None
+
+    def get_due_jobs(self, now):
+        timestamp = datetime_to_utc_timestamp(now)
+        return self._get_jobs(self.jobs_t.c.next_run_time <= timestamp)
+
+    def get_next_run_time(self):
+        selectable = select([self.jobs_t.c.next_run_time]).\
+            where(self.jobs_t.c.next_run_time != null()).\
+            order_by(self.jobs_t.c.next_run_time).limit(1)
+        next_run_time = self.engine.execute(selectable).scalar()
+        return utc_timestamp_to_datetime(next_run_time)
+
+    def get_all_jobs(self):
+        jobs = self._get_jobs()
+        self._fix_paused_jobs_sorting(jobs)
+        return jobs
+
+    def add_job(self, job):
+        insert = self.jobs_t.insert().values(**{
+            'id': job.id,
+            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
+            'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol)
+        })
+        try:
+            self.engine.execute(insert)
+        except IntegrityError:
+            raise ConflictingIdError(job.id)
+
+    def update_job(self, job):
+        update = self.jobs_t.update().values(**{
+            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
+            'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol)
+        }).where(self.jobs_t.c.id == job.id)
+        result = self.engine.execute(update)
+        if result.rowcount == 0:
+            raise JobLookupError(job.id)
+
+    def remove_job(self, job_id):
+        delete = self.jobs_t.delete().where(self.jobs_t.c.id == job_id)
+        result = self.engine.execute(delete)
+        if result.rowcount == 0:
+            raise JobLookupError(job_id)
+
+    def remove_all_jobs(self):
+        delete = self.jobs_t.delete()
+        self.engine.execute(delete)
+
+    def shutdown(self):
+        self.engine.dispose()
+
+    def _reconstitute_job(self, job_state):
+        job_state = pickle.loads(job_state)
+        job_state['jobstore'] = self
+        job = Job.__new__(Job)
+        job.__setstate__(job_state)
+        job._scheduler = self._scheduler
+        job._jobstore_alias = self._alias
+        return job
+
+    def _get_jobs(self, *conditions):
+        jobs = []
+        selectable = select([self.jobs_t.c.id,
self.jobs_t.c.job_state]).\ + order_by(self.jobs_t.c.next_run_time) + selectable = selectable.where(*conditions) if conditions else selectable + failed_job_ids = set() + for row in self.engine.execute(selectable): + try: + jobs.append(self._reconstitute_job(row.job_state)) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', row.id) + failed_job_ids.add(row.id) + + # Remove all the jobs we failed to restore + if failed_job_ids: + delete = self.jobs_t.delete().where(self.jobs_t.c.id.in_(failed_job_ids)) + self.engine.execute(delete) + + return jobs + + def __repr__(self): + return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url) diff --git a/lib/apscheduler/jobstores/sqlalchemy_store.py b/lib/apscheduler/jobstores/sqlalchemy_store.py deleted file mode 100644 index 8ece7e24..00000000 --- a/lib/apscheduler/jobstores/sqlalchemy_store.py +++ /dev/null @@ -1,87 +0,0 @@ -""" -Stores jobs in a database table using SQLAlchemy. -""" -import pickle -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job - -try: - from sqlalchemy import * -except ImportError: # pragma: nocover - raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed') - -logger = logging.getLogger(__name__) - - -class SQLAlchemyJobStore(JobStore): - def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', - metadata=None, pickle_protocol=pickle.HIGHEST_PROTOCOL): - self.jobs = [] - self.pickle_protocol = pickle_protocol - - if engine: - self.engine = engine - elif url: - self.engine = create_engine(url) - else: - raise ValueError('Need either "engine" or "url" defined') - - self.jobs_t = Table(tablename, metadata or MetaData(), - Column('id', Integer, - Sequence(tablename + '_id_seq', optional=True), - primary_key=True), - Column('trigger', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('func_ref', String(1024), nullable=False), - Column('args', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('kwargs', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('name', Unicode(1024), unique=True), - Column('misfire_grace_time', Integer, nullable=False), - Column('coalesce', Boolean, nullable=False), - Column('max_runs', Integer), - Column('max_instances', Integer), - Column('next_run_time', DateTime, nullable=False), - Column('runs', BigInteger)) - - self.jobs_t.create(self.engine, True) - - def add_job(self, job): - job_dict = job.__getstate__() - result = self.engine.execute(self.jobs_t.insert().values(**job_dict)) - job.id = result.inserted_primary_key[0] - self.jobs.append(job) - - def remove_job(self, job): - delete = self.jobs_t.delete().where(self.jobs_t.c.id == job.id) - self.engine.execute(delete) - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for row in self.engine.execute(select([self.jobs_t])): - try: - job = Job.__new__(Job) - job_dict = dict(row.items()) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - self.jobs = jobs - - def update_job(self, job): - job_dict = job.__getstate__() - update = self.jobs_t.update().where(self.jobs_t.c.id == job.id).\ - values(next_run_time=job_dict['next_run_time'], - runs=job_dict['runs']) - self.engine.execute(update) - - def close(self): - self.engine.dispose() - - def __repr__(self): - return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url) diff --git 
a/lib/apscheduler/jobstores/zookeeper.py b/lib/apscheduler/jobstores/zookeeper.py new file mode 100644 index 00000000..44e2e61f --- /dev/null +++ b/lib/apscheduler/jobstores/zookeeper.py @@ -0,0 +1,179 @@ +from __future__ import absolute_import + +import os +from datetime import datetime + +from pytz import utc +from kazoo.exceptions import NoNodeError, NodeExistsError + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from kazoo.client import KazooClient +except ImportError: # pragma: nocover + raise ImportError('ZooKeeperJobStore requires Kazoo installed') + + +class ZooKeeperJobStore(BaseJobStore): + """ + Stores jobs in a ZooKeeper tree. Any leftover keyword arguments are directly passed to + kazoo's `KazooClient + `_. + + Plugin alias: ``zookeeper`` + + :param str path: path to store jobs in + :param client: a :class:`~kazoo.client.KazooClient` instance to use instead of + providing connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, path='/apscheduler', client=None, close_connection_on_exit=False, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(ZooKeeperJobStore, self).__init__() + self.pickle_protocol = pickle_protocol + self.close_connection_on_exit = close_connection_on_exit + + if not path: + raise ValueError('The "path" parameter must not be empty') + + self.path = path + + if client: + self.client = maybe_ref(client) + else: + self.client = KazooClient(**connect_args) + self._ensured_path = False + + def _ensure_paths(self): + if not self._ensured_path: + self.client.ensure_path(self.path) + self._ensured_path = True + + def start(self, scheduler, alias): + super(ZooKeeperJobStore, self).start(scheduler, alias) + if not self.client.connected: + self.client.start() + + def lookup_job(self, job_id): + self._ensure_paths() + node_path = os.path.join(self.path, job_id) + try: + content, _ = self.client.get(node_path) + doc = pickle.loads(content) + job = self._reconstitute_job(doc['job_state']) + return job + except: + return None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + jobs = [job_def['job'] for job_def in self._get_jobs() + if job_def['next_run_time'] is not None and job_def['next_run_time'] <= timestamp] + return jobs + + def get_next_run_time(self): + next_runs = [job_def['next_run_time'] for job_def in self._get_jobs() + if job_def['next_run_time'] is not None] + return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None + + def get_all_jobs(self): + jobs = [job_def['job'] for job_def in self._get_jobs()] + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + self._ensure_paths() + node_path = os.path.join(self.path, str(job.id)) + value = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': job.__getstate__() + } + data = pickle.dumps(value, self.pickle_protocol) + try: + self.client.create(node_path, value=data) + except NodeExistsError: + raise ConflictingIdError(job.id) + + def update_job(self, job): + self._ensure_paths() + node_path = os.path.join(self.path, str(job.id)) + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 
'job_state': job.__getstate__() + } + data = pickle.dumps(changes, self.pickle_protocol) + try: + self.client.set(node_path, value=data) + except NoNodeError: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + self._ensure_paths() + node_path = os.path.join(self.path, str(job_id)) + try: + self.client.delete(node_path) + except NoNodeError: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + try: + self.client.delete(self.path, recursive=True) + except NoNodeError: + pass + self._ensured_path = False + + def shutdown(self): + if self.close_connection_on_exit: + self.client.stop() + self.client.close() + + def _reconstitute_job(self, job_state): + job_state = job_state + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self): + self._ensure_paths() + jobs = [] + failed_job_ids = [] + all_ids = self.client.get_children(self.path) + for node_name in all_ids: + try: + node_path = os.path.join(self.path, node_name) + content, _ = self.client.get(node_path) + doc = pickle.loads(content) + job_def = { + 'job_id': node_name, + 'next_run_time': doc['next_run_time'] if doc['next_run_time'] else None, + 'job_state': doc['job_state'], + 'job': self._reconstitute_job(doc['job_state']), + 'creation_time': _.ctime + } + jobs.append(job_def) + except: + self._logger.exception('Unable to restore job "%s" -- removing it' % node_name) + failed_job_ids.append(node_name) + + # Remove all the jobs we failed to restore + if failed_job_ids: + for failed_id in failed_job_ids: + self.remove_job(failed_id) + paused_sort_key = datetime(9999, 12, 31, tzinfo=utc) + return sorted(jobs, key=lambda job_def: (job_def['job'].next_run_time or paused_sort_key, + job_def['creation_time'])) + + def __repr__(self): + self._logger.exception('<%s (client=%s)>' % (self.__class__.__name__, self.client)) + return '<%s (client=%s)>' % (self.__class__.__name__, self.client) diff --git a/lib/apscheduler/scheduler.py b/lib/apscheduler/scheduler.py deleted file mode 100644 index ee08ad8b..00000000 --- a/lib/apscheduler/scheduler.py +++ /dev/null @@ -1,559 +0,0 @@ -""" -This module is the main part of the library. It houses the Scheduler class -and related exceptions. -""" - -from threading import Thread, Event, Lock -from datetime import datetime, timedelta -from logging import getLogger -import os -import sys - -from apscheduler.util import * -from apscheduler.triggers import SimpleTrigger, IntervalTrigger, CronTrigger -from apscheduler.jobstores.ram_store import RAMJobStore -from apscheduler.job import Job, MaxInstancesReachedError -from apscheduler.events import * -from apscheduler.threadpool import ThreadPool - -logger = getLogger(__name__) - - -class SchedulerAlreadyRunningError(Exception): - """ - Raised when attempting to start or configure the scheduler when it's - already running. - """ - - def __str__(self): - return 'Scheduler is already running' - - -class Scheduler(object): - """ - This class is responsible for scheduling jobs and triggering - their execution. - """ - - _stopped = False - _thread = None - - def __init__(self, gconfig={}, **options): - self._wakeup = Event() - self._jobstores = {} - self._jobstores_lock = Lock() - self._listeners = [] - self._listeners_lock = Lock() - self._pending_jobs = [] - self.configure(gconfig, **options) - - def configure(self, gconfig={}, **options): - """ - Reconfigures the scheduler with the given options. 
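# Editorial sketch, not part of the patch: minimal use of the ZooKeeperJobStore
# added above. Assumes kazoo is installed and a ZooKeeper server is reachable at
# the (illustrative) address below; extra keyword arguments such as ``hosts``
# are forwarded to KazooClient.
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.zookeeper import ZooKeeperJobStore

store = ZooKeeperJobStore(path='/apscheduler', hosts='127.0.0.1:2181')
scheduler = BackgroundScheduler(jobstores={'default': store})
scheduler.start()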
Can only be done - when the scheduler isn't running. - """ - if self.running: - raise SchedulerAlreadyRunningError - - # Set general options - config = combine_opts(gconfig, 'apscheduler.', options) - self.misfire_grace_time = int(config.pop('misfire_grace_time', 1)) - self.coalesce = asbool(config.pop('coalesce', True)) - self.daemonic = asbool(config.pop('daemonic', True)) - - # Configure the thread pool - if 'threadpool' in config: - self._threadpool = maybe_ref(config['threadpool']) - else: - threadpool_opts = combine_opts(config, 'threadpool.') - self._threadpool = ThreadPool(**threadpool_opts) - - # Configure job stores - jobstore_opts = combine_opts(config, 'jobstore.') - jobstores = {} - for key, value in jobstore_opts.items(): - store_name, option = key.split('.', 1) - opts_dict = jobstores.setdefault(store_name, {}) - opts_dict[option] = value - - for alias, opts in jobstores.items(): - classname = opts.pop('class') - cls = maybe_ref(classname) - jobstore = cls(**opts) - self.add_jobstore(jobstore, alias, True) - - def start(self): - """ - Starts the scheduler in a new thread. - """ - if self.running: - raise SchedulerAlreadyRunningError - - # Create a RAMJobStore as the default if there is no default job store - if not 'default' in self._jobstores: - self.add_jobstore(RAMJobStore(), 'default', True) - - # Schedule all pending jobs - for job, jobstore in self._pending_jobs: - self._real_add_job(job, jobstore, False) - del self._pending_jobs[:] - - self._stopped = False - self._thread = Thread(target=self._main_loop, name='APScheduler') - self._thread.setDaemon(self.daemonic) - self._thread.start() - - def shutdown(self, wait=True, shutdown_threadpool=True): - """ - Shuts down the scheduler and terminates the thread. - Does not interrupt any currently running jobs. - - :param wait: ``True`` to wait until all currently executing jobs have - finished (if ``shutdown_threadpool`` is also ``True``) - :param shutdown_threadpool: ``True`` to shut down the thread pool - """ - if not self.running: - return - - self._stopped = True - self._wakeup.set() - - # Shut down the thread pool - if shutdown_threadpool: - self._threadpool.shutdown(wait) - - # Wait until the scheduler thread terminates - self._thread.join() - - @property - def running(self): - return not self._stopped and self._thread and self._thread.isAlive() - - def add_jobstore(self, jobstore, alias, quiet=False): - """ - Adds a job store to this scheduler. - - :param jobstore: job store to be added - :param alias: alias for the job store - :param quiet: True to suppress scheduler thread wakeup - :type jobstore: instance of - :class:`~apscheduler.jobstores.base.JobStore` - :type alias: str - """ - self._jobstores_lock.acquire() - try: - if alias in self._jobstores: - raise KeyError('Alias "%s" is already in use' % alias) - self._jobstores[alias] = jobstore - jobstore.load_jobs() - finally: - self._jobstores_lock.release() - - # Notify listeners that a new job store has been added - self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_ADDED, alias)) - - # Notify the scheduler so it can scan the new job store for jobs - if not quiet: - self._wakeup.set() - - def remove_jobstore(self, alias): - """ - Removes the job store by the given alias from this scheduler. 
- - :type alias: str - """ - self._jobstores_lock.acquire() - try: - try: - del self._jobstores[alias] - except KeyError: - raise KeyError('No such job store: %s' % alias) - finally: - self._jobstores_lock.release() - - # Notify listeners that a job store has been removed - self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_REMOVED, alias)) - - def add_listener(self, callback, mask=EVENT_ALL): - """ - Adds a listener for scheduler events. When a matching event occurs, - ``callback`` is executed with the event object as its sole argument. - If the ``mask`` parameter is not provided, the callback will receive - events of all types. - - :param callback: any callable that takes one argument - :param mask: bitmask that indicates which events should be listened to - """ - self._listeners_lock.acquire() - try: - self._listeners.append((callback, mask)) - finally: - self._listeners_lock.release() - - def remove_listener(self, callback): - """ - Removes a previously added event listener. - """ - self._listeners_lock.acquire() - try: - for i, (cb, _) in enumerate(self._listeners): - if callback == cb: - del self._listeners[i] - finally: - self._listeners_lock.release() - - def _notify_listeners(self, event): - self._listeners_lock.acquire() - try: - listeners = tuple(self._listeners) - finally: - self._listeners_lock.release() - - for cb, mask in listeners: - if event.code & mask: - try: - cb(event) - except: - logger.exception('Error notifying listener') - - def _real_add_job(self, job, jobstore, wakeup): - job.compute_next_run_time(datetime.now()) - if not job.next_run_time: - raise ValueError('Not adding job since it would never be run') - - self._jobstores_lock.acquire() - try: - try: - store = self._jobstores[jobstore] - except KeyError: - raise KeyError('No such job store: %s' % jobstore) - store.add_job(job) - finally: - self._jobstores_lock.release() - - # Notify listeners that a new job has been added - event = JobStoreEvent(EVENT_JOBSTORE_JOB_ADDED, jobstore, job) - self._notify_listeners(event) - - logger.info('Added job "%s" to job store "%s"', job, jobstore) - - # Notify the scheduler about the new job - if wakeup: - self._wakeup.set() - - def add_job(self, trigger, func, args, kwargs, jobstore='default', - **options): - """ - Adds the given job to the job list and notifies the scheduler thread. - - :param trigger: alias of the job store to store the job in - :param func: callable to run at the given time - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param jobstore: alias of the job store to store the job in - :rtype: :class:`~apscheduler.job.Job` - """ - job = Job(trigger, func, args or [], kwargs or {}, - options.pop('misfire_grace_time', self.misfire_grace_time), - options.pop('coalesce', self.coalesce), **options) - if not self.running: - self._pending_jobs.append((job, jobstore)) - logger.info('Adding job tentatively -- it will be properly ' - 'scheduled when the scheduler starts') - else: - self._real_add_job(job, jobstore, True) - return job - - def _remove_job(self, job, alias, jobstore): - jobstore.remove_job(job) - - # Notify listeners that a job has been removed - event = JobStoreEvent(EVENT_JOBSTORE_JOB_REMOVED, alias, job) - self._notify_listeners(event) - - logger.info('Removed job "%s"', job) - - def add_date_job(self, func, date, args=None, kwargs=None, **options): - """ - Schedules a job to be completed on a specific date and time. 
- - :param func: callable to run at the given time - :param date: the date/time to run the job at - :param name: name of the job - :param jobstore: stored the job in the named (or given) job store - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :type date: :class:`datetime.date` - :rtype: :class:`~apscheduler.job.Job` - """ - trigger = SimpleTrigger(date) - return self.add_job(trigger, func, args, kwargs, **options) - - def add_interval_job(self, func, weeks=0, days=0, hours=0, minutes=0, - seconds=0, start_date=None, args=None, kwargs=None, - **options): - """ - Schedules a job to be completed on specified intervals. - - :param func: callable to run - :param weeks: number of weeks to wait - :param days: number of days to wait - :param hours: number of hours to wait - :param minutes: number of minutes to wait - :param seconds: number of seconds to wait - :param start_date: when to first execute the job and start the - counter (default is after the given interval) - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job - :param jobstore: alias of the job store to add the job to - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :rtype: :class:`~apscheduler.job.Job` - """ - interval = timedelta(weeks=weeks, days=days, hours=hours, - minutes=minutes, seconds=seconds) - trigger = IntervalTrigger(interval, start_date) - return self.add_job(trigger, func, args, kwargs, **options) - - def add_cron_job(self, func, year='*', month='*', day='*', week='*', - day_of_week='*', hour='*', minute='*', second='*', - start_date=None, args=None, kwargs=None, **options): - """ - Schedules a job to be completed on times that match the given - expressions. - - :param func: callable to run - :param year: year to run on - :param month: month to run on (0 = January) - :param day: day of month to run on - :param week: week of the year to run on - :param day_of_week: weekday to run on (0 = Monday) - :param hour: hour to run on - :param second: second to run on - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job - :param jobstore: alias of the job store to add the job to - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :return: the scheduled job - :rtype: :class:`~apscheduler.job.Job` - """ - trigger = CronTrigger(year=year, month=month, day=day, week=week, - day_of_week=day_of_week, hour=hour, - minute=minute, second=second, - start_date=start_date) - return self.add_job(trigger, func, args, kwargs, **options) - - def cron_schedule(self, **options): - """ - Decorator version of :meth:`add_cron_job`. - This decorator does not wrap its host function. - Unscheduling decorated functions is possible by passing the ``job`` - attribute of the scheduled function to :meth:`unschedule_job`. - """ - def inner(func): - func.job = self.add_cron_job(func, **options) - return func - return inner - - def interval_schedule(self, **options): - """ - Decorator version of :meth:`add_interval_job`. - This decorator does not wrap its host function. - Unscheduling decorated functions is possible by passing the ``job`` - attribute of the scheduled function to :meth:`unschedule_job`. 
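# Editorial note, not part of the patch: the add_date_job/add_interval_job/
# add_cron_job helpers and their decorators being deleted here have no direct
# 3.x counterparts; the replacement is a single add_job() taking a trigger
# alias. A rough sketch of equivalent calls (names illustrative):
#
#     scheduler.add_interval_job(func, minutes=5)        # 2.x, removed
#     scheduler.add_job(func, 'interval', minutes=5)     # 3.x
#
#     scheduler.add_cron_job(func, hour=3)               # 2.x, removed
#     scheduler.add_job(func, 'cron', hour=3)            # 3.x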
- """ - def inner(func): - func.job = self.add_interval_job(func, **options) - return func - return inner - - def get_jobs(self): - """ - Returns a list of all scheduled jobs. - - :return: list of :class:`~apscheduler.job.Job` objects - """ - self._jobstores_lock.acquire() - try: - jobs = [] - for jobstore in itervalues(self._jobstores): - jobs.extend(jobstore.jobs) - return jobs - finally: - self._jobstores_lock.release() - - def unschedule_job(self, job): - """ - Removes a job, preventing it from being run any more. - """ - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - if job in list(jobstore.jobs): - self._remove_job(job, alias, jobstore) - return - finally: - self._jobstores_lock.release() - - raise KeyError('Job "%s" is not scheduled in any job store' % job) - - def unschedule_func(self, func): - """ - Removes all jobs that would execute the given function. - """ - found = False - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - for job in list(jobstore.jobs): - if job.func == func: - self._remove_job(job, alias, jobstore) - found = True - finally: - self._jobstores_lock.release() - - if not found: - raise KeyError('The given function is not scheduled in this ' - 'scheduler') - - def print_jobs(self, out=None): - """ - Prints out a textual listing of all jobs currently scheduled on this - scheduler. - - :param out: a file-like object to print to (defaults to **sys.stdout** - if nothing is given) - """ - out = out or sys.stdout - job_strs = [] - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - job_strs.append('Jobstore %s:' % alias) - if jobstore.jobs: - for job in jobstore.jobs: - job_strs.append(' %s' % job) - else: - job_strs.append(' No scheduled jobs') - finally: - self._jobstores_lock.release() - - out.write(os.linesep.join(job_strs)) - - def _run_job(self, job, run_times): - """ - Acts as a harness that runs the actual job code in a thread. 
- """ - for run_time in run_times: - # See if the job missed its run time window, and handle possible - # misfires accordingly - difference = datetime.now() - run_time - grace_time = timedelta(seconds=job.misfire_grace_time) - if difference > grace_time: - # Notify listeners about a missed run - event = JobEvent(EVENT_JOB_MISSED, job, run_time) - self._notify_listeners(event) - logger.warning('Run time of job "%s" was missed by %s', - job, difference) - else: - try: - job.add_instance() - except MaxInstancesReachedError: - event = JobEvent(EVENT_JOB_MISSED, job, run_time) - self._notify_listeners(event) - logger.warning('Execution of job "%s" skipped: ' - 'maximum number of running instances ' - 'reached (%d)', job, job.max_instances) - break - - logger.info('Running job "%s" (scheduled at %s)', job, - run_time) - - try: - retval = job.func(*job.args, **job.kwargs) - except: - # Notify listeners about the exception - exc, tb = sys.exc_info()[1:] - event = JobEvent(EVENT_JOB_ERROR, job, run_time, - exception=exc, traceback=tb) - self._notify_listeners(event) - - logger.exception('Job "%s" raised an exception', job) - else: - # Notify listeners about successful execution - event = JobEvent(EVENT_JOB_EXECUTED, job, run_time, - retval=retval) - self._notify_listeners(event) - - logger.info('Job "%s" executed successfully', job) - - job.remove_instance() - - # If coalescing is enabled, don't attempt any further runs - if job.coalesce: - break - - def _process_jobs(self, now): - """ - Iterates through jobs in every jobstore, starts pending jobs - and figures out the next wakeup time. - """ - next_wakeup_time = None - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - for job in tuple(jobstore.jobs): - run_times = job.get_run_times(now) - if run_times: - self._threadpool.submit(self._run_job, job, run_times) - - # Increase the job's run count - if job.coalesce: - job.runs += 1 - else: - job.runs += len(run_times) - - # Update the job, but don't keep finished jobs around - if job.compute_next_run_time(now + timedelta(microseconds=1)): - jobstore.update_job(job) - else: - self._remove_job(job, alias, jobstore) - - if not next_wakeup_time: - next_wakeup_time = job.next_run_time - elif job.next_run_time: - next_wakeup_time = min(next_wakeup_time, - job.next_run_time) - return next_wakeup_time - finally: - self._jobstores_lock.release() - - def _main_loop(self): - """Executes jobs on schedule.""" - - logger.info('Scheduler started') - self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_START)) - - self._wakeup.clear() - while not self._stopped: - logger.debug('Looking for jobs to run') - now = datetime.now() - next_wakeup_time = self._process_jobs(now) - - # Sleep until the next job is scheduled to be run, - # a new job is added or the scheduler is stopped - if next_wakeup_time is not None: - wait_seconds = time_difference(next_wakeup_time, now) - logger.debug('Next wakeup is due at %s (in %f seconds)', - next_wakeup_time, wait_seconds) - self._wakeup.wait(wait_seconds) - else: - logger.debug('No jobs; waiting until a job is added') - self._wakeup.wait() - self._wakeup.clear() - - logger.info('Scheduler has been shut down') - self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN)) diff --git a/lib/apscheduler/schedulers/__init__.py b/lib/apscheduler/schedulers/__init__.py new file mode 100644 index 00000000..bd8a7900 --- /dev/null +++ b/lib/apscheduler/schedulers/__init__.py @@ -0,0 +1,12 @@ +class SchedulerAlreadyRunningError(Exception): + 
"""Raised when attempting to start or configure the scheduler when it's already running.""" + + def __str__(self): + return 'Scheduler is already running' + + +class SchedulerNotRunningError(Exception): + """Raised when attempting to shutdown the scheduler when it's not running.""" + + def __str__(self): + return 'Scheduler is not running' diff --git a/lib/apscheduler/schedulers/asyncio.py b/lib/apscheduler/schedulers/asyncio.py new file mode 100644 index 00000000..a272b1a2 --- /dev/null +++ b/lib/apscheduler/schedulers/asyncio.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + import asyncio +except ImportError: # pragma: nocover + try: + import trollius as asyncio + except ImportError: + raise ImportError( + 'AsyncIOScheduler requires either Python 3.4 or the asyncio package installed') + + +def run_in_event_loop(func): + @wraps(func) + def wrapper(self, *args): + self._eventloop.call_soon_threadsafe(func, self, *args) + return wrapper + + +class AsyncIOScheduler(BaseScheduler): + """ + A scheduler that runs on an asyncio (:pep:`3156`) event loop. + + The default executor can run jobs based on native coroutines (``async def``). + + Extra options: + + ============== ============================================================= + ``event_loop`` AsyncIO event loop to use (defaults to the global event loop) + ============== ============================================================= + """ + + _eventloop = None + _timeout = None + + @run_in_event_loop + def shutdown(self, wait=True): + super(AsyncIOScheduler, self).shutdown(wait) + self._stop_timer() + + def _configure(self, config): + self._eventloop = maybe_ref(config.pop('event_loop', None)) or asyncio.get_event_loop() + super(AsyncIOScheduler, self)._configure(config) + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timeout = self._eventloop.call_later(wait_seconds, self.wakeup) + + def _stop_timer(self): + if self._timeout: + self._timeout.cancel() + del self._timeout + + @run_in_event_loop + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) + + def _create_default_executor(self): + from apscheduler.executors.asyncio import AsyncIOExecutor + return AsyncIOExecutor() diff --git a/lib/apscheduler/schedulers/background.py b/lib/apscheduler/schedulers/background.py new file mode 100644 index 00000000..03f29822 --- /dev/null +++ b/lib/apscheduler/schedulers/background.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import + +from threading import Thread, Event + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.util import asbool + + +class BackgroundScheduler(BlockingScheduler): + """ + A scheduler that runs in the background using a separate thread + (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will return immediately). 
+ + Extra options: + + ========== ============================================================================= + ``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``, see + `the documentation + `_ + for further details) + ========== ============================================================================= + """ + + _thread = None + + def _configure(self, config): + self._daemon = asbool(config.pop('daemon', True)) + super(BackgroundScheduler, self)._configure(config) + + def start(self, *args, **kwargs): + self._event = Event() + BaseScheduler.start(self, *args, **kwargs) + self._thread = Thread(target=self._main_loop, name='APScheduler') + self._thread.daemon = self._daemon + self._thread.start() + + def shutdown(self, *args, **kwargs): + super(BackgroundScheduler, self).shutdown(*args, **kwargs) + self._thread.join() + del self._thread diff --git a/lib/apscheduler/schedulers/base.py b/lib/apscheduler/schedulers/base.py new file mode 100644 index 00000000..93269092 --- /dev/null +++ b/lib/apscheduler/schedulers/base.py @@ -0,0 +1,1006 @@ +from __future__ import print_function + +from abc import ABCMeta, abstractmethod +from collections import MutableMapping +from threading import RLock +from datetime import datetime, timedelta +from logging import getLogger +import warnings +import sys + +from pkg_resources import iter_entry_points +from tzlocal import get_localzone +import six + +from apscheduler.schedulers import SchedulerAlreadyRunningError, SchedulerNotRunningError +from apscheduler.executors.base import MaxInstancesReachedError, BaseExecutor +from apscheduler.executors.pool import ThreadPoolExecutor +from apscheduler.jobstores.base import ConflictingIdError, JobLookupError, BaseJobStore +from apscheduler.jobstores.memory import MemoryJobStore +from apscheduler.job import Job +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import asbool, asint, astimezone, maybe_ref, timedelta_seconds, undefined +from apscheduler.events import ( + SchedulerEvent, JobEvent, JobSubmissionEvent, EVENT_SCHEDULER_START, EVENT_SCHEDULER_SHUTDOWN, + EVENT_JOBSTORE_ADDED, EVENT_JOBSTORE_REMOVED, EVENT_ALL, EVENT_JOB_MODIFIED, EVENT_JOB_REMOVED, + EVENT_JOB_ADDED, EVENT_EXECUTOR_ADDED, EVENT_EXECUTOR_REMOVED, EVENT_ALL_JOBS_REMOVED, + EVENT_JOB_SUBMITTED, EVENT_JOB_MAX_INSTANCES, EVENT_SCHEDULER_RESUMED, EVENT_SCHEDULER_PAUSED) + +#: constant indicating a scheduler's stopped state +STATE_STOPPED = 0 +#: constant indicating a scheduler's running state (started and processing jobs) +STATE_RUNNING = 1 +#: constant indicating a scheduler's paused state (started but not processing jobs) +STATE_PAUSED = 2 + + +class BaseScheduler(six.with_metaclass(ABCMeta)): + """ + Abstract base class for all schedulers. 
+ + Takes the following keyword arguments: + + :param str|logging.Logger logger: logger to use for the scheduler's logging (defaults to + apscheduler.scheduler) + :param str|datetime.tzinfo timezone: the default time zone (defaults to the local timezone) + :param int|float jobstore_retry_interval: the minimum number of seconds to wait between + retries in the scheduler's main loop if the job store raises an exception when getting + the list of due jobs + :param dict job_defaults: default values for newly added jobs + :param dict jobstores: a dictionary of job store alias -> job store instance or configuration + dict + :param dict executors: a dictionary of executor alias -> executor instance or configuration + dict + + :ivar int state: current running state of the scheduler (one of the following constants from + ``apscheduler.schedulers.base``: ``STATE_STOPPED``, ``STATE_RUNNING``, ``STATE_PAUSED``) + + .. seealso:: :ref:`scheduler-config` + """ + + _trigger_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.triggers')) + _trigger_classes = {} + _executor_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.executors')) + _executor_classes = {} + _jobstore_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.jobstores')) + _jobstore_classes = {} + + # + # Public API + # + + def __init__(self, gconfig={}, **options): + super(BaseScheduler, self).__init__() + self._executors = {} + self._executors_lock = self._create_lock() + self._jobstores = {} + self._jobstores_lock = self._create_lock() + self._listeners = [] + self._listeners_lock = self._create_lock() + self._pending_jobs = [] + self.state = STATE_STOPPED + self.configure(gconfig, **options) + + def configure(self, gconfig={}, prefix='apscheduler.', **options): + """ + Reconfigures the scheduler with the given options. + + Can only be done when the scheduler isn't running. + + :param dict gconfig: a "global" configuration dictionary whose values can be overridden by + keyword arguments to this method + :param str|unicode prefix: pick only those keys from ``gconfig`` that are prefixed with + this string (pass an empty string or ``None`` to use all keys) + :raises SchedulerAlreadyRunningError: if the scheduler is already running + + """ + if self.state != STATE_STOPPED: + raise SchedulerAlreadyRunningError + + # If a non-empty prefix was given, strip it from the keys in the + # global configuration dict + if prefix: + prefixlen = len(prefix) + gconfig = dict((key[prefixlen:], value) for key, value in six.iteritems(gconfig) + if key.startswith(prefix)) + + # Create a structure from the dotted options + # (e.g. "a.b.c = d" -> {'a': {'b': {'c': 'd'}}}) + config = {} + for key, value in six.iteritems(gconfig): + parts = key.split('.') + parent = config + key = parts.pop(0) + while parts: + parent = parent.setdefault(key, {}) + key = parts.pop(0) + parent[key] = value + + # Override any options with explicit keyword arguments + config.update(options) + self._configure(config) + + def start(self, paused=False): + """ + Start the configured executors and job stores and begin processing scheduled jobs. 
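# Editorial sketch, not part of the patch: configure() flattens dotted keys in
# the "global" dict into nested options (the prefix defaults to 'apscheduler.'),
# and _configure() further below accepts executor/job store definitions either
# as instances or as dicts keyed by 'type' (plugin alias) or 'class' (dotted
# path). The two calls below express the same configuration.
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.configure({
    'apscheduler.executors.default': {'type': 'threadpool', 'max_workers': 20},
    'apscheduler.job_defaults.coalesce': 'true',
})
scheduler.configure(
    executors={'default': {'type': 'threadpool', 'max_workers': 20}},
    job_defaults={'coalesce': True},
)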
+ + :param bool paused: if ``True``, don't start job processing until :meth:`resume` is called + :raises SchedulerAlreadyRunningError: if the scheduler is already running + + """ + if self.state != STATE_STOPPED: + raise SchedulerAlreadyRunningError + + with self._executors_lock: + # Create a default executor if nothing else is configured + if 'default' not in self._executors: + self.add_executor(self._create_default_executor(), 'default') + + # Start all the executors + for alias, executor in six.iteritems(self._executors): + executor.start(self, alias) + + with self._jobstores_lock: + # Create a default job store if nothing else is configured + if 'default' not in self._jobstores: + self.add_jobstore(self._create_default_jobstore(), 'default') + + # Start all the job stores + for alias, store in six.iteritems(self._jobstores): + store.start(self, alias) + + # Schedule all pending jobs + for job, jobstore_alias, replace_existing in self._pending_jobs: + self._real_add_job(job, jobstore_alias, replace_existing) + del self._pending_jobs[:] + + self.state = STATE_PAUSED if paused else STATE_RUNNING + self._logger.info('Scheduler started') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_START)) + + if not paused: + self.wakeup() + + @abstractmethod + def shutdown(self, wait=True): + """ + Shuts down the scheduler, along with its executors and job stores. + + Does not interrupt any currently running jobs. + + :param bool wait: ``True`` to wait until all currently executing jobs have finished + :raises SchedulerNotRunningError: if the scheduler has not been started yet + + """ + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + + self.state = STATE_STOPPED + + with self._jobstores_lock, self._executors_lock: + # Shut down all executors + for executor in six.itervalues(self._executors): + executor.shutdown(wait) + + # Shut down all job stores + for jobstore in six.itervalues(self._jobstores): + jobstore.shutdown() + + self._logger.info('Scheduler has been shut down') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN)) + + def pause(self): + """ + Pause job processing in the scheduler. + + This will prevent the scheduler from waking up to do job processing until :meth:`resume` + is called. It will not however stop any already running job processing. + + """ + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + elif self.state == STATE_RUNNING: + self.state = STATE_PAUSED + self._logger.info('Paused scheduler job processing') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_PAUSED)) + + def resume(self): + """Resume job processing in the scheduler.""" + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + elif self.state == STATE_PAUSED: + self.state = STATE_RUNNING + self._logger.info('Resumed scheduler job processing') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_RESUMED)) + self.wakeup() + + @property + def running(self): + """ + Return ``True`` if the scheduler has been started. + + This is a shortcut for ``scheduler.state != STATE_STOPPED``. + + """ + return self.state != STATE_STOPPED + + def add_executor(self, executor, alias='default', **executor_opts): + """ + Adds an executor to this scheduler. + + Any extra keyword arguments will be passed to the executor plugin's constructor, assuming + that the first argument is the name of an executor plugin. 
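# Editorial sketch, not part of the patch: registering a second executor by its
# plugin alias ('processpool' ships with APScheduler 3.x) and routing a job to
# it; the job function name is illustrative.
from apscheduler.schedulers.background import BackgroundScheduler

def crunch_numbers():  # placeholder job for illustration
    pass

scheduler = BackgroundScheduler()
scheduler.add_executor('processpool', alias='heavy', max_workers=4)
scheduler.add_job(crunch_numbers, 'interval', hours=1, executor='heavy')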
+ + :param str|unicode|apscheduler.executors.base.BaseExecutor executor: either an executor + instance or the name of an executor plugin + :param str|unicode alias: alias for the scheduler + :raises ValueError: if there is already an executor by the given alias + + """ + with self._executors_lock: + if alias in self._executors: + raise ValueError('This scheduler already has an executor by the alias of "%s"' % + alias) + + if isinstance(executor, BaseExecutor): + self._executors[alias] = executor + elif isinstance(executor, six.string_types): + self._executors[alias] = executor = self._create_plugin_instance( + 'executor', executor, executor_opts) + else: + raise TypeError('Expected an executor instance or a string, got %s instead' % + executor.__class__.__name__) + + # Start the executor right away if the scheduler is running + if self.state != STATE_STOPPED: + executor.start(self, alias) + + self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_ADDED, alias)) + + def remove_executor(self, alias, shutdown=True): + """ + Removes the executor by the given alias from this scheduler. + + :param str|unicode alias: alias of the executor + :param bool shutdown: ``True`` to shut down the executor after + removing it + + """ + with self._executors_lock: + executor = self._lookup_executor(alias) + del self._executors[alias] + + if shutdown: + executor.shutdown() + + self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_REMOVED, alias)) + + def add_jobstore(self, jobstore, alias='default', **jobstore_opts): + """ + Adds a job store to this scheduler. + + Any extra keyword arguments will be passed to the job store plugin's constructor, assuming + that the first argument is the name of a job store plugin. + + :param str|unicode|apscheduler.jobstores.base.BaseJobStore jobstore: job store to be added + :param str|unicode alias: alias for the job store + :raises ValueError: if there is already a job store by the given alias + + """ + with self._jobstores_lock: + if alias in self._jobstores: + raise ValueError('This scheduler already has a job store by the alias of "%s"' % + alias) + + if isinstance(jobstore, BaseJobStore): + self._jobstores[alias] = jobstore + elif isinstance(jobstore, six.string_types): + self._jobstores[alias] = jobstore = self._create_plugin_instance( + 'jobstore', jobstore, jobstore_opts) + else: + raise TypeError('Expected a job store instance or a string, got %s instead' % + jobstore.__class__.__name__) + + # Start the job store right away if the scheduler isn't stopped + if self.state != STATE_STOPPED: + jobstore.start(self, alias) + + # Notify listeners that a new job store has been added + self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_ADDED, alias)) + + # Notify the scheduler so it can scan the new job store for jobs + if self.state != STATE_STOPPED: + self.wakeup() + + def remove_jobstore(self, alias, shutdown=True): + """ + Removes the job store by the given alias from this scheduler. + + :param str|unicode alias: alias of the job store + :param bool shutdown: ``True`` to shut down the job store after removing it + + """ + with self._jobstores_lock: + jobstore = self._lookup_jobstore(alias) + del self._jobstores[alias] + + if shutdown: + jobstore.shutdown() + + self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_REMOVED, alias)) + + def add_listener(self, callback, mask=EVENT_ALL): + """ + add_listener(callback, mask=EVENT_ALL) + + Adds a listener for scheduler events. + + When a matching event occurs, ``callback`` is executed with the event object as its + sole argument. 
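# Editorial sketch, not part of the patch: a listener restricted by mask to the
# two execution outcomes; the JobExecutionEvent it receives carries job_id and
# exception.
from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
from apscheduler.schedulers.background import BackgroundScheduler

def on_job_done(event):
    if event.exception:
        print('job %s raised %r' % (event.job_id, event.exception))
    else:
        print('job %s ran' % event.job_id)

scheduler = BackgroundScheduler()
scheduler.add_listener(on_job_done, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)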
If the ``mask`` parameter is not provided, the callback will receive events + of all types. + + :param callback: any callable that takes one argument + :param int mask: bitmask that indicates which events should be + listened to + + .. seealso:: :mod:`apscheduler.events` + .. seealso:: :ref:`scheduler-events` + + """ + with self._listeners_lock: + self._listeners.append((callback, mask)) + + def remove_listener(self, callback): + """Removes a previously added event listener.""" + + with self._listeners_lock: + for i, (cb, _) in enumerate(self._listeners): + if callback == cb: + del self._listeners[i] + + def add_job(self, func, trigger=None, args=None, kwargs=None, id=None, name=None, + misfire_grace_time=undefined, coalesce=undefined, max_instances=undefined, + next_run_time=undefined, jobstore='default', executor='default', + replace_existing=False, **trigger_args): + """ + add_job(func, trigger=None, args=None, kwargs=None, id=None, \ + name=None, misfire_grace_time=undefined, coalesce=undefined, \ + max_instances=undefined, next_run_time=undefined, \ + jobstore='default', executor='default', \ + replace_existing=False, **trigger_args) + + Adds the given job to the job list and wakes up the scheduler if it's already running. + + Any option that defaults to ``undefined`` will be replaced with the corresponding default + value when the job is scheduled (which happens when the scheduler is started, or + immediately if the scheduler is already running). + + The ``func`` argument can be given either as a callable object or a textual reference in + the ``package.module:some.object`` format, where the first half (separated by ``:``) is an + importable module and the second half is a reference to the callable object, relative to + the module. + + The ``trigger`` argument can either be: + #. the alias name of the trigger (e.g. ``date``, ``interval`` or ``cron``), in which case + any extra keyword arguments to this method are passed on to the trigger's constructor + #. 
an instance of a trigger class + + :param func: callable (or a textual reference to one) to run at the given time + :param str|apscheduler.triggers.base.BaseTrigger trigger: trigger that determines when + ``func`` is called + :param list|tuple args: list of positional arguments to call func with + :param dict kwargs: dict of keyword arguments to call func with + :param str|unicode id: explicit identifier for the job (for modifying it later) + :param str|unicode name: textual description of the job + :param int misfire_grace_time: seconds after the designated runtime that the job is still + allowed to be run + :param bool coalesce: run once instead of many times if the scheduler determines that the + job should be run more than once in succession + :param int max_instances: maximum number of concurrently running instances allowed for this + job + :param datetime next_run_time: when to first run the job, regardless of the trigger (pass + ``None`` to add the job as paused) + :param str|unicode jobstore: alias of the job store to store the job in + :param str|unicode executor: alias of the executor to run the job with + :param bool replace_existing: ``True`` to replace an existing job with the same ``id`` + (but retain the number of runs from the existing one) + :rtype: Job + + """ + job_kwargs = { + 'trigger': self._create_trigger(trigger, trigger_args), + 'executor': executor, + 'func': func, + 'args': tuple(args) if args is not None else (), + 'kwargs': dict(kwargs) if kwargs is not None else {}, + 'id': id, + 'name': name, + 'misfire_grace_time': misfire_grace_time, + 'coalesce': coalesce, + 'max_instances': max_instances, + 'next_run_time': next_run_time + } + job_kwargs = dict((key, value) for key, value in six.iteritems(job_kwargs) if + value is not undefined) + job = Job(self, **job_kwargs) + + # Don't really add jobs to job stores before the scheduler is up and running + with self._jobstores_lock: + if self.state == STATE_STOPPED: + self._pending_jobs.append((job, jobstore, replace_existing)) + self._logger.info('Adding job tentatively -- it will be properly scheduled when ' + 'the scheduler starts') + else: + self._real_add_job(job, jobstore, replace_existing) + + return job + + def scheduled_job(self, trigger, args=None, kwargs=None, id=None, name=None, + misfire_grace_time=undefined, coalesce=undefined, max_instances=undefined, + next_run_time=undefined, jobstore='default', executor='default', + **trigger_args): + """ + scheduled_job(trigger, args=None, kwargs=None, id=None, \ + name=None, misfire_grace_time=undefined, \ + coalesce=undefined, max_instances=undefined, \ + next_run_time=undefined, jobstore='default', \ + executor='default',**trigger_args) + + A decorator version of :meth:`add_job`, except that ``replace_existing`` is always + ``True``. + + .. important:: The ``id`` argument must be given if scheduling a job in a persistent job + store. The scheduler cannot, however, enforce this requirement. + + """ + def inner(func): + self.add_job(func, trigger, args, kwargs, id, name, misfire_grace_time, coalesce, + max_instances, next_run_time, jobstore, executor, True, **trigger_args) + return func + return inner + + def modify_job(self, job_id, jobstore=None, **changes): + """ + Modifies the properties of a single job. + + Modifications are passed to this method as extra keyword arguments. 
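# Editorial sketch, not part of the patch: giving the job an explicit id so it
# can be modified later; modify_job() applies the keyword changes in place.
from apscheduler.schedulers.background import BackgroundScheduler

def sync():  # placeholder job for illustration
    pass

scheduler = BackgroundScheduler()
scheduler.add_job(sync, 'interval', minutes=15, id='sync', replace_existing=True)
scheduler.start()
scheduler.modify_job('sync', max_instances=2)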
+ + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job: the relevant job instance + + """ + with self._jobstores_lock: + job, jobstore = self._lookup_job(job_id, jobstore) + job._modify(**changes) + if jobstore: + self._lookup_jobstore(jobstore).update_job(job) + + self._dispatch_event(JobEvent(EVENT_JOB_MODIFIED, job_id, jobstore)) + + # Wake up the scheduler since the job's next run time may have been changed + if self.state == STATE_RUNNING: + self.wakeup() + + return job + + def reschedule_job(self, job_id, jobstore=None, trigger=None, **trigger_args): + """ + Constructs a new trigger for a job and updates its next run time. + + Extra keyword arguments are passed directly to the trigger's constructor. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :param trigger: alias of the trigger type or a trigger instance + :return Job: the relevant job instance + + """ + trigger = self._create_trigger(trigger, trigger_args) + now = datetime.now(self.timezone) + next_run_time = trigger.get_next_fire_time(None, now) + return self.modify_job(job_id, jobstore, trigger=trigger, next_run_time=next_run_time) + + def pause_job(self, job_id, jobstore=None): + """ + Causes the given job not to be executed until it is explicitly resumed. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job: the relevant job instance + + """ + return self.modify_job(job_id, jobstore, next_run_time=None) + + def resume_job(self, job_id, jobstore=None): + """ + Resumes the schedule of the given job, or removes the job if its schedule is finished. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job|None: the relevant job instance if the job was rescheduled, or ``None`` if no + next run time could be calculated and the job was removed + + """ + with self._jobstores_lock: + job, jobstore = self._lookup_job(job_id, jobstore) + now = datetime.now(self.timezone) + next_run_time = job.trigger.get_next_fire_time(None, now) + if next_run_time: + return self.modify_job(job_id, jobstore, next_run_time=next_run_time) + else: + self.remove_job(job.id, jobstore) + + def get_jobs(self, jobstore=None, pending=None): + """ + Returns a list of pending jobs (if the scheduler hasn't been started yet) and scheduled + jobs, either from a specific job store or from all of them. + + If the scheduler has not been started yet, only pending jobs can be returned because the + job stores haven't been started yet either. 
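# Editorial sketch, not part of the patch, continuing the 'sync' job above:
# pause_job() clears next_run_time, resume_job() recomputes it from the trigger,
# and reschedule_job() swaps in a whole new trigger.
scheduler.pause_job('sync')
scheduler.resume_job('sync')
scheduler.reschedule_job('sync', trigger='cron', hour=2, minute=30)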
+ + :param str|unicode jobstore: alias of the job store + :param bool pending: **DEPRECATED** + :rtype: list[Job] + + """ + if pending is not None: + warnings.warn('The "pending" option is deprecated -- get_jobs() always returns ' + 'pending jobs if the scheduler has been started and scheduled jobs ' + 'otherwise', DeprecationWarning) + + with self._jobstores_lock: + jobs = [] + if self.state == STATE_STOPPED: + for job, alias, replace_existing in self._pending_jobs: + if jobstore is None or alias == jobstore: + jobs.append(job) + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore is None or alias == jobstore: + jobs.extend(store.get_all_jobs()) + + return jobs + + def get_job(self, job_id, jobstore=None): + """ + Returns the Job that matches the given ``job_id``. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that most likely contains the job + :return: the Job by the given ID, or ``None`` if it wasn't found + :rtype: Job + + """ + with self._jobstores_lock: + try: + return self._lookup_job(job_id, jobstore)[0] + except JobLookupError: + return + + def remove_job(self, job_id, jobstore=None): + """ + Removes a job, preventing it from being run any more. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :raises JobLookupError: if the job was not found + + """ + jobstore_alias = None + with self._jobstores_lock: + if self.state == STATE_STOPPED: + # Check if the job is among the pending jobs + if self.state == STATE_STOPPED: + for i, (job, alias, replace_existing) in enumerate(self._pending_jobs): + if job.id == job_id and jobstore in (None, alias): + del self._pending_jobs[i] + jobstore_alias = alias + break + else: + # Otherwise, try to remove it from each store until it succeeds or we run out of + # stores to check + for alias, store in six.iteritems(self._jobstores): + if jobstore in (None, alias): + try: + store.remove_job(job_id) + jobstore_alias = alias + break + except JobLookupError: + continue + + if jobstore_alias is None: + raise JobLookupError(job_id) + + # Notify listeners that a job has been removed + event = JobEvent(EVENT_JOB_REMOVED, job_id, jobstore_alias) + self._dispatch_event(event) + + self._logger.info('Removed job %s', job_id) + + def remove_all_jobs(self, jobstore=None): + """ + Removes all jobs from the specified job store, or all job stores if none is given. + + :param str|unicode jobstore: alias of the job store + + """ + with self._jobstores_lock: + if self.state == STATE_STOPPED: + if jobstore: + self._pending_jobs = [pending for pending in self._pending_jobs if + pending[1] != jobstore] + else: + self._pending_jobs = [] + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore in (None, alias): + store.remove_all_jobs() + + self._dispatch_event(SchedulerEvent(EVENT_ALL_JOBS_REMOVED, jobstore)) + + def print_jobs(self, jobstore=None, out=None): + """ + print_jobs(jobstore=None, out=sys.stdout) + + Prints out a textual listing of all jobs currently scheduled on either all job stores or + just a specific one. 
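Again with the hypothetical ``scheduler``, the lookup, removal and listing calls documented above compose like this:

    for job in scheduler.get_jobs():
        print(job.id, job.next_run_time)

    if scheduler.get_job('ticker') is not None:   # returns None when missing
        scheduler.remove_job('ticker')            # raises JobLookupError when missing

    scheduler.print_jobs()                        # textual listing, defaults to sys.stdout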
+ + :param str|unicode jobstore: alias of the job store, ``None`` to list jobs from all stores + :param file out: a file-like object to print to (defaults to **sys.stdout** if nothing is + given) + + """ + out = out or sys.stdout + with self._jobstores_lock: + if self.state == STATE_STOPPED: + print(u'Pending jobs:', file=out) + if self._pending_jobs: + for job, jobstore_alias, replace_existing in self._pending_jobs: + if jobstore in (None, jobstore_alias): + print(u' %s' % job, file=out) + else: + print(u' No pending jobs', file=out) + else: + for alias, store in sorted(six.iteritems(self._jobstores)): + if jobstore in (None, alias): + print(u'Jobstore %s:' % alias, file=out) + jobs = store.get_all_jobs() + if jobs: + for job in jobs: + print(u' %s' % job, file=out) + else: + print(u' No scheduled jobs', file=out) + + @abstractmethod + def wakeup(self): + """ + Notifies the scheduler that there may be jobs due for execution. + Triggers :meth:`_process_jobs` to be run in an implementation specific manner. + """ + + # + # Private API + # + + def _configure(self, config): + # Set general options + self._logger = maybe_ref(config.pop('logger', None)) or getLogger('apscheduler.scheduler') + self.timezone = astimezone(config.pop('timezone', None)) or get_localzone() + self.jobstore_retry_interval = float(config.pop('jobstore_retry_interval', 10)) + + # Set the job defaults + job_defaults = config.get('job_defaults', {}) + self._job_defaults = { + 'misfire_grace_time': asint(job_defaults.get('misfire_grace_time', 1)), + 'coalesce': asbool(job_defaults.get('coalesce', True)), + 'max_instances': asint(job_defaults.get('max_instances', 1)) + } + + # Configure executors + self._executors.clear() + for alias, value in six.iteritems(config.get('executors', {})): + if isinstance(value, BaseExecutor): + self.add_executor(value, alias) + elif isinstance(value, MutableMapping): + executor_class = value.pop('class', None) + plugin = value.pop('type', None) + if plugin: + executor = self._create_plugin_instance('executor', plugin, value) + elif executor_class: + cls = maybe_ref(executor_class) + executor = cls(**value) + else: + raise ValueError( + 'Cannot create executor "%s" -- either "type" or "class" must be defined' % + alias) + + self.add_executor(executor, alias) + else: + raise TypeError( + "Expected executor instance or dict for executors['%s'], got %s instead" % + (alias, value.__class__.__name__)) + + # Configure job stores + self._jobstores.clear() + for alias, value in six.iteritems(config.get('jobstores', {})): + if isinstance(value, BaseJobStore): + self.add_jobstore(value, alias) + elif isinstance(value, MutableMapping): + jobstore_class = value.pop('class', None) + plugin = value.pop('type', None) + if plugin: + jobstore = self._create_plugin_instance('jobstore', plugin, value) + elif jobstore_class: + cls = maybe_ref(jobstore_class) + jobstore = cls(**value) + else: + raise ValueError( + 'Cannot create job store "%s" -- either "type" or "class" must be ' + 'defined' % alias) + + self.add_jobstore(jobstore, alias) + else: + raise TypeError( + "Expected job store instance or dict for jobstores['%s'], got %s instead" % + (alias, value.__class__.__name__)) + + def _create_default_executor(self): + """Creates a default executor store, specific to the particular scheduler type.""" + return ThreadPoolExecutor() + + def _create_default_jobstore(self): + """Creates a default job store, specific to the particular scheduler type.""" + return MemoryJobStore() + + def _lookup_executor(self, alias): + 
""" + Returns the executor instance by the given name from the list of executors that were added + to this scheduler. + + :type alias: str + :raises KeyError: if no executor by the given alias is not found + + """ + try: + return self._executors[alias] + except KeyError: + raise KeyError('No such executor: %s' % alias) + + def _lookup_jobstore(self, alias): + """ + Returns the job store instance by the given name from the list of job stores that were + added to this scheduler. + + :type alias: str + :raises KeyError: if no job store by the given alias is not found + + """ + try: + return self._jobstores[alias] + except KeyError: + raise KeyError('No such job store: %s' % alias) + + def _lookup_job(self, job_id, jobstore_alias): + """ + Finds a job by its ID. + + :type job_id: str + :param str jobstore_alias: alias of a job store to look in + :return tuple[Job, str]: a tuple of job, jobstore alias (jobstore alias is None in case of + a pending job) + :raises JobLookupError: if no job by the given ID is found. + + """ + if self.state == STATE_STOPPED: + # Check if the job is among the pending jobs + for job, alias, replace_existing in self._pending_jobs: + if job.id == job_id: + return job, None + else: + # Look in all job stores + for alias, store in six.iteritems(self._jobstores): + if jobstore_alias in (None, alias): + job = store.lookup_job(job_id) + if job is not None: + return job, alias + + raise JobLookupError(job_id) + + def _dispatch_event(self, event): + """ + Dispatches the given event to interested listeners. + + :param SchedulerEvent event: the event to send + + """ + with self._listeners_lock: + listeners = tuple(self._listeners) + + for cb, mask in listeners: + if event.code & mask: + try: + cb(event) + except: + self._logger.exception('Error notifying listener') + + def _real_add_job(self, job, jobstore_alias, replace_existing): + """ + :param Job job: the job to add + :param bool replace_existing: ``True`` to use update_job() in case the job already exists + in the store + + """ + # Fill in undefined values with defaults + replacements = {} + for key, value in six.iteritems(self._job_defaults): + if not hasattr(job, key): + replacements[key] = value + + # Calculate the next run time if there is none defined + if not hasattr(job, 'next_run_time'): + now = datetime.now(self.timezone) + replacements['next_run_time'] = job.trigger.get_next_fire_time(None, now) + + # Apply any replacements + job._modify(**replacements) + + # Add the job to the given job store + store = self._lookup_jobstore(jobstore_alias) + try: + store.add_job(job) + except ConflictingIdError: + if replace_existing: + store.update_job(job) + else: + raise + + # Mark the job as no longer pending + job._jobstore_alias = jobstore_alias + + # Notify listeners that a new job has been added + event = JobEvent(EVENT_JOB_ADDED, job.id, jobstore_alias) + self._dispatch_event(event) + + self._logger.info('Added job "%s" to job store "%s"', job.name, jobstore_alias) + + # Notify the scheduler about the new job + if self.state == STATE_RUNNING: + self.wakeup() + + def _create_plugin_instance(self, type_, alias, constructor_kwargs): + """Creates an instance of the given plugin type, loading the plugin first if necessary.""" + plugin_container, class_container, base_class = { + 'trigger': (self._trigger_plugins, self._trigger_classes, BaseTrigger), + 'jobstore': (self._jobstore_plugins, self._jobstore_classes, BaseJobStore), + 'executor': (self._executor_plugins, self._executor_classes, BaseExecutor) + }[type_] + + try: + 
plugin_cls = class_container[alias] + except KeyError: + if alias in plugin_container: + plugin_cls = class_container[alias] = plugin_container[alias].load() + if not issubclass(plugin_cls, base_class): + raise TypeError('The {0} entry point does not point to a {0} class'. + format(type_)) + else: + raise LookupError('No {0} by the name "{1}" was found'.format(type_, alias)) + + return plugin_cls(**constructor_kwargs) + + def _create_trigger(self, trigger, trigger_args): + if isinstance(trigger, BaseTrigger): + return trigger + elif trigger is None: + trigger = 'date' + elif not isinstance(trigger, six.string_types): + raise TypeError('Expected a trigger instance or string, got %s instead' % + trigger.__class__.__name__) + + # Use the scheduler's time zone if nothing else is specified + trigger_args.setdefault('timezone', self.timezone) + + # Instantiate the trigger class + return self._create_plugin_instance('trigger', trigger, trigger_args) + + def _create_lock(self): + """Creates a reentrant lock object.""" + return RLock() + + def _process_jobs(self): + """ + Iterates through jobs in every jobstore, starts jobs that are due and figures out how long + to wait for the next round. + + If the ``get_due_jobs()`` call raises an exception, a new wakeup is scheduled in at least + ``jobstore_retry_interval`` seconds. + + """ + if self.state == STATE_PAUSED: + self._logger.debug('Scheduler is paused -- not processing jobs') + return None + + self._logger.debug('Looking for jobs to run') + now = datetime.now(self.timezone) + next_wakeup_time = None + events = [] + + with self._jobstores_lock: + for jobstore_alias, jobstore in six.iteritems(self._jobstores): + try: + due_jobs = jobstore.get_due_jobs(now) + except Exception as e: + # Schedule a wakeup at least in jobstore_retry_interval seconds + self._logger.warning('Error getting due jobs from job store %r: %s', + jobstore_alias, e) + retry_wakeup_time = now + timedelta(seconds=self.jobstore_retry_interval) + if not next_wakeup_time or next_wakeup_time > retry_wakeup_time: + next_wakeup_time = retry_wakeup_time + + continue + + for job in due_jobs: + # Look up the job's executor + try: + executor = self._lookup_executor(job.executor) + except: + self._logger.error( + 'Executor lookup ("%s") failed for job "%s" -- removing it from the ' + 'job store', job.executor, job) + self.remove_job(job.id, jobstore_alias) + continue + + run_times = job._get_run_times(now) + run_times = run_times[-1:] if run_times and job.coalesce else run_times + if run_times: + try: + executor.submit_job(job, run_times) + except MaxInstancesReachedError: + self._logger.warning( + 'Execution of job "%s" skipped: maximum number of running ' + 'instances reached (%d)', job, job.max_instances) + event = JobSubmissionEvent(EVENT_JOB_MAX_INSTANCES, job.id, + jobstore_alias, run_times) + events.append(event) + except: + self._logger.exception('Error submitting job "%s" to executor "%s"', + job, job.executor) + else: + event = JobSubmissionEvent(EVENT_JOB_SUBMITTED, job.id, jobstore_alias, + run_times) + events.append(event) + + # Update the job if it has a next execution time. + # Otherwise remove it from the job store. 
+ job_next_run = job.trigger.get_next_fire_time(run_times[-1], now) + if job_next_run: + job._modify(next_run_time=job_next_run) + jobstore.update_job(job) + else: + self.remove_job(job.id, jobstore_alias) + + # Set a new next wakeup time if there isn't one yet or + # the jobstore has an even earlier one + jobstore_next_run_time = jobstore.get_next_run_time() + if jobstore_next_run_time and (next_wakeup_time is None or + jobstore_next_run_time < next_wakeup_time): + next_wakeup_time = jobstore_next_run_time.astimezone(self.timezone) + + # Dispatch collected events + for event in events: + self._dispatch_event(event) + + # Determine the delay until this method should be called again + if self.state == STATE_PAUSED: + wait_seconds = None + self._logger.debug('Scheduler is paused; waiting until resume() is called') + elif next_wakeup_time is None: + wait_seconds = None + self._logger.debug('No jobs; waiting until a job is added') + else: + wait_seconds = max(timedelta_seconds(next_wakeup_time - now), 0) + self._logger.debug('Next wakeup is due at %s (in %f seconds)', next_wakeup_time, + wait_seconds) + + return wait_seconds diff --git a/lib/apscheduler/schedulers/blocking.py b/lib/apscheduler/schedulers/blocking.py new file mode 100644 index 00000000..e6171575 --- /dev/null +++ b/lib/apscheduler/schedulers/blocking.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import + +from threading import Event + +from apscheduler.schedulers.base import BaseScheduler, STATE_STOPPED +from apscheduler.util import TIMEOUT_MAX + + +class BlockingScheduler(BaseScheduler): + """ + A scheduler that runs in the foreground + (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block). + """ + _event = None + + def start(self, *args, **kwargs): + self._event = Event() + super(BlockingScheduler, self).start(*args, **kwargs) + self._main_loop() + + def shutdown(self, wait=True): + super(BlockingScheduler, self).shutdown(wait) + self._event.set() + + def _main_loop(self): + wait_seconds = TIMEOUT_MAX + while self.state != STATE_STOPPED: + self._event.wait(wait_seconds) + self._event.clear() + wait_seconds = self._process_jobs() + + def wakeup(self): + self._event.set() diff --git a/lib/apscheduler/schedulers/gevent.py b/lib/apscheduler/schedulers/gevent.py new file mode 100644 index 00000000..d48ed74a --- /dev/null +++ b/lib/apscheduler/schedulers/gevent.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import + +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.schedulers.base import BaseScheduler + +try: + from gevent.event import Event + from gevent.lock import RLock + import gevent +except ImportError: # pragma: nocover + raise ImportError('GeventScheduler requires gevent installed') + + +class GeventScheduler(BlockingScheduler): + """A scheduler that runs as a Gevent greenlet.""" + + _greenlet = None + + def start(self, *args, **kwargs): + self._event = Event() + BaseScheduler.start(self, *args, **kwargs) + self._greenlet = gevent.spawn(self._main_loop) + return self._greenlet + + def shutdown(self, *args, **kwargs): + super(GeventScheduler, self).shutdown(*args, **kwargs) + self._greenlet.join() + del self._greenlet + + def _create_lock(self): + return RLock() + + def _create_default_executor(self): + from apscheduler.executors.gevent import GeventExecutor + return GeventExecutor() diff --git a/lib/apscheduler/schedulers/qt.py b/lib/apscheduler/schedulers/qt.py new file mode 100644 index 00000000..092533e9 --- /dev/null +++ b/lib/apscheduler/schedulers/qt.py @@ 
-0,0 +1,42 @@ +from __future__ import absolute_import + +from apscheduler.schedulers.base import BaseScheduler + +try: + from PyQt5.QtCore import QObject, QTimer +except ImportError: # pragma: nocover + try: + from PyQt4.QtCore import QObject, QTimer + except ImportError: + try: + from PySide.QtCore import QObject, QTimer # flake8: noqa + except ImportError: + raise ImportError('QtScheduler requires either PyQt5, PyQt4 or PySide installed') + + +class QtScheduler(BaseScheduler): + """A scheduler that runs in a Qt event loop.""" + + _timer = None + + def shutdown(self, *args, **kwargs): + super(QtScheduler, self).shutdown(*args, **kwargs) + self._stop_timer() + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timer = QTimer.singleShot(wait_seconds * 1000, self._process_jobs) + + def _stop_timer(self): + if self._timer: + if self._timer.isActive(): + self._timer.stop() + del self._timer + + def wakeup(self): + self._start_timer(0) + + def _process_jobs(self): + wait_seconds = super(QtScheduler, self)._process_jobs() + self._start_timer(wait_seconds) diff --git a/lib/apscheduler/schedulers/tornado.py b/lib/apscheduler/schedulers/tornado.py new file mode 100644 index 00000000..0a9171f2 --- /dev/null +++ b/lib/apscheduler/schedulers/tornado.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import + +from datetime import timedelta +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + from tornado.ioloop import IOLoop +except ImportError: # pragma: nocover + raise ImportError('TornadoScheduler requires tornado installed') + + +def run_in_ioloop(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + self._ioloop.add_callback(func, self, *args, **kwargs) + return wrapper + + +class TornadoScheduler(BaseScheduler): + """ + A scheduler that runs on a Tornado IOLoop. + + The default executor can run jobs based on native coroutines (``async def``). 
+ + =========== =============================================================== + ``io_loop`` Tornado IOLoop instance to use (defaults to the global IO loop) + =========== =============================================================== + """ + + _ioloop = None + _timeout = None + + @run_in_ioloop + def shutdown(self, wait=True): + super(TornadoScheduler, self).shutdown(wait) + self._stop_timer() + + def _configure(self, config): + self._ioloop = maybe_ref(config.pop('io_loop', None)) or IOLoop.current() + super(TornadoScheduler, self)._configure(config) + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timeout = self._ioloop.add_timeout(timedelta(seconds=wait_seconds), self.wakeup) + + def _stop_timer(self): + if self._timeout: + self._ioloop.remove_timeout(self._timeout) + del self._timeout + + def _create_default_executor(self): + from apscheduler.executors.tornado import TornadoExecutor + return TornadoExecutor() + + @run_in_ioloop + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) diff --git a/lib/apscheduler/schedulers/twisted.py b/lib/apscheduler/schedulers/twisted.py new file mode 100644 index 00000000..6b43a84b --- /dev/null +++ b/lib/apscheduler/schedulers/twisted.py @@ -0,0 +1,62 @@ +from __future__ import absolute_import + +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + from twisted.internet import reactor as default_reactor +except ImportError: # pragma: nocover + raise ImportError('TwistedScheduler requires Twisted installed') + + +def run_in_reactor(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + self._reactor.callFromThread(func, self, *args, **kwargs) + return wrapper + + +class TwistedScheduler(BaseScheduler): + """ + A scheduler that runs on a Twisted reactor. + + Extra options: + + =========== ======================================================== + ``reactor`` Reactor instance to use (defaults to the global reactor) + =========== ======================================================== + """ + + _reactor = None + _delayedcall = None + + def _configure(self, config): + self._reactor = maybe_ref(config.pop('reactor', default_reactor)) + super(TwistedScheduler, self)._configure(config) + + @run_in_reactor + def shutdown(self, wait=True): + super(TwistedScheduler, self).shutdown(wait) + self._stop_timer() + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._delayedcall = self._reactor.callLater(wait_seconds, self.wakeup) + + def _stop_timer(self): + if self._delayedcall and self._delayedcall.active(): + self._delayedcall.cancel() + del self._delayedcall + + @run_in_reactor + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) + + def _create_default_executor(self): + from apscheduler.executors.twisted import TwistedExecutor + return TwistedExecutor() diff --git a/lib/apscheduler/threadpool.py b/lib/apscheduler/threadpool.py deleted file mode 100644 index 8ec47da0..00000000 --- a/lib/apscheduler/threadpool.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -Generic thread pool class. Modeled after Java's ThreadPoolExecutor. -Please note that this ThreadPool does *not* fully implement the PEP 3148 -ThreadPool! 
-""" - -from threading import Thread, Lock, currentThread -from weakref import ref -import logging -import atexit - -try: - from queue import Queue, Empty -except ImportError: - from Queue import Queue, Empty - -logger = logging.getLogger(__name__) -_threadpools = set() - - -# Worker threads are daemonic in order to let the interpreter exit without -# an explicit shutdown of the thread pool. The following trick is necessary -# to allow worker threads to finish cleanly. -def _shutdown_all(): - for pool_ref in tuple(_threadpools): - pool = pool_ref() - if pool: - pool.shutdown() - -atexit.register(_shutdown_all) - - -class ThreadPool(object): - def __init__(self, core_threads=0, max_threads=20, keepalive=1): - """ - :param core_threads: maximum number of persistent threads in the pool - :param max_threads: maximum number of total threads in the pool - :param thread_class: callable that creates a Thread object - :param keepalive: seconds to keep non-core worker threads waiting - for new tasks - """ - self.core_threads = core_threads - self.max_threads = max(max_threads, core_threads, 1) - self.keepalive = keepalive - self._queue = Queue() - self._threads_lock = Lock() - self._threads = set() - self._shutdown = False - - _threadpools.add(ref(self)) - logger.info('Started thread pool with %d core threads and %s maximum ' - 'threads', core_threads, max_threads or 'unlimited') - - def _adjust_threadcount(self): - self._threads_lock.acquire() - try: - if self.num_threads < self.max_threads: - self._add_thread(self.num_threads < self.core_threads) - finally: - self._threads_lock.release() - - def _add_thread(self, core): - t = Thread(target=self._run_jobs, args=(core,)) - t.setDaemon(True) - t.start() - self._threads.add(t) - - def _run_jobs(self, core): - logger.debug('Started worker thread') - block = True - timeout = None - if not core: - block = self.keepalive > 0 - timeout = self.keepalive - - while True: - try: - func, args, kwargs = self._queue.get(block, timeout) - except Empty: - break - - if self._shutdown: - break - - try: - func(*args, **kwargs) - except: - logger.exception('Error in worker thread') - - self._threads_lock.acquire() - self._threads.remove(currentThread()) - self._threads_lock.release() - - logger.debug('Exiting worker thread') - - @property - def num_threads(self): - return len(self._threads) - - def submit(self, func, *args, **kwargs): - if self._shutdown: - raise RuntimeError('Cannot schedule new tasks after shutdown') - - self._queue.put((func, args, kwargs)) - self._adjust_threadcount() - - def shutdown(self, wait=True): - if self._shutdown: - return - - logging.info('Shutting down thread pool') - self._shutdown = True - _threadpools.remove(ref(self)) - - self._threads_lock.acquire() - for _ in range(self.num_threads): - self._queue.put((None, None, None)) - self._threads_lock.release() - - if wait: - self._threads_lock.acquire() - threads = tuple(self._threads) - self._threads_lock.release() - for thread in threads: - thread.join() - - def __repr__(self): - if self.max_threads: - threadcount = '%d/%d' % (self.num_threads, self.max_threads) - else: - threadcount = '%d' % self.num_threads - - return '' % (id(self), threadcount) diff --git a/lib/apscheduler/triggers/__init__.py b/lib/apscheduler/triggers/__init__.py index 74a97884..e69de29b 100644 --- a/lib/apscheduler/triggers/__init__.py +++ b/lib/apscheduler/triggers/__init__.py @@ -1,3 +0,0 @@ -from apscheduler.triggers.cron import CronTrigger -from apscheduler.triggers.interval import IntervalTrigger -from 
diff --git a/lib/apscheduler/triggers/base.py b/lib/apscheduler/triggers/base.py
new file mode 100644
index 00000000..ba98632e
--- /dev/null
+++ b/lib/apscheduler/triggers/base.py
@@ -0,0 +1,19 @@
+from abc import ABCMeta, abstractmethod
+
+import six
+
+
+class BaseTrigger(six.with_metaclass(ABCMeta)):
+    """Abstract base class that defines the interface that every trigger must implement."""
+
+    __slots__ = ()
+
+    @abstractmethod
+    def get_next_fire_time(self, previous_fire_time, now):
+        """
+        Returns the next datetime to fire on. If no such datetime can be calculated, returns
+        ``None``.
+
+        :param datetime.datetime previous_fire_time: the previous time the trigger was fired
+        :param datetime.datetime now: current datetime
+        """
diff --git a/lib/apscheduler/triggers/cron/__init__.py b/lib/apscheduler/triggers/cron/__init__.py
index 3f8d9a8f..eccee0c0 100644
--- a/lib/apscheduler/triggers/cron/__init__.py
+++ b/lib/apscheduler/triggers/cron/__init__.py
@@ -1,32 +1,73 @@
-from datetime import date, datetime
+from datetime import datetime, timedelta
 
-from apscheduler.triggers.cron.fields import *
-from apscheduler.util import datetime_ceil, convert_to_datetime
+from tzlocal import get_localzone
+import six
+
+from apscheduler.triggers.base import BaseTrigger
+from apscheduler.triggers.cron.fields import (
+    BaseField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES)
+from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone
 
-class CronTrigger(object):
-    FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour',
-                   'minute', 'second')
-    FIELDS_MAP = {'year': BaseField,
-                  'month': BaseField,
-                  'week': WeekField,
-                  'day': DayOfMonthField,
-                  'day_of_week': DayOfWeekField,
-                  'hour': BaseField,
-                  'minute': BaseField,
-                  'second': BaseField}
+class CronTrigger(BaseTrigger):
+    """
+    Triggers when current time matches all specified time constraints,
+    similarly to how the UNIX cron scheduler works.
 
-    def __init__(self, **values):
-        self.start_date = values.pop('start_date', None)
-        if self.start_date:
-            self.start_date = convert_to_datetime(self.start_date)
+    :param int|str year: 4-digit year
+    :param int|str month: month (1-12)
+    :param int|str day: day of the month (1-31)
+    :param int|str week: ISO week (1-53)
+    :param int|str day_of_week: number or name of weekday (0-6 or mon,tue,wed,thu,fri,sat,sun)
+    :param int|str hour: hour (0-23)
+    :param int|str minute: minute (0-59)
+    :param int|str second: second (0-59)
+    :param datetime|str start_date: earliest possible date/time to trigger on (inclusive)
+    :param datetime|str end_date: latest possible date/time to trigger on (inclusive)
+    :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults
+        to scheduler timezone)
+
+    .. note:: The first weekday is always **monday**.
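By way of example (an editor's sketch, not part of the patch), the constraint parameters combine like a crontab entry:

    from apscheduler.triggers.cron import CronTrigger

    # roughly the crontab line "30 2 * * mon": every Monday at 02:30
    trigger = CronTrigger(day_of_week='mon', hour=2, minute=30)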
+ """ + + FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second') + FIELDS_MAP = { + 'year': BaseField, + 'month': BaseField, + 'week': WeekField, + 'day': DayOfMonthField, + 'day_of_week': DayOfWeekField, + 'hour': BaseField, + 'minute': BaseField, + 'second': BaseField + } + + __slots__ = 'timezone', 'start_date', 'end_date', 'fields' + + def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None, + minute=None, second=None, start_date=None, end_date=None, timezone=None): + if timezone: + self.timezone = astimezone(timezone) + elif isinstance(start_date, datetime) and start_date.tzinfo: + self.timezone = start_date.tzinfo + elif isinstance(end_date, datetime) and end_date.tzinfo: + self.timezone = end_date.tzinfo + else: + self.timezone = get_localzone() + + self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date') + self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date') + + values = dict((key, value) for (key, value) in six.iteritems(locals()) + if key in self.FIELD_NAMES and value is not None) self.fields = [] + assign_defaults = False for field_name in self.FIELD_NAMES: if field_name in values: exprs = values.pop(field_name) is_default = False - elif not values: + assign_defaults = not values + elif assign_defaults: exprs = DEFAULT_VALUES[field_name] is_default = True else: @@ -39,18 +80,18 @@ class CronTrigger(object): def _increment_field_value(self, dateval, fieldnum): """ - Increments the designated field and resets all less significant fields - to their minimum values. + Increments the designated field and resets all less significant fields to their minimum + values. :type dateval: datetime :type fieldnum: int - :type amount: int + :return: a tuple containing the new date, and the number of the field that was actually + incremented :rtype: tuple - :return: a tuple containing the new date, and the number of the field - that was actually incremented """ - i = 0 + values = {} + i = 0 while i < len(self.fields): field = self.fields[i] if not field.REAL: @@ -77,7 +118,8 @@ class CronTrigger(object): values[field.name] = value + 1 i += 1 - return datetime(**values), fieldnum + difference = datetime(**values) - dateval.replace(tzinfo=None) + return self.timezone.normalize(dateval + difference), fieldnum def _set_field_value(self, dateval, fieldnum, new_value): values = {} @@ -90,13 +132,18 @@ class CronTrigger(object): else: values[field.name] = new_value - return datetime(**values) + return self.timezone.localize(datetime(**values)) + + def get_next_fire_time(self, previous_fire_time, now): + if previous_fire_time: + start_date = min(now, previous_fire_time + timedelta(microseconds=1)) + if start_date == previous_fire_time: + start_date += timedelta(microseconds=1) + else: + start_date = max(now, self.start_date) if self.start_date else now - def get_next_fire_time(self, start_date): - if self.start_date: - start_date = max(start_date, self.start_date) - next_date = datetime_ceil(start_date) fieldnum = 0 + next_date = datetime_ceil(start_date).astimezone(self.timezone) while 0 <= fieldnum < len(self.fields): field = self.fields[fieldnum] curr_value = field.get_value(next_date) @@ -104,32 +151,56 @@ class CronTrigger(object): if next_value is None: # No valid value was found - next_date, fieldnum = self._increment_field_value(next_date, - fieldnum - 1) + next_date, fieldnum = self._increment_field_value(next_date, fieldnum - 1) elif next_value > curr_value: # A valid, but higher than 
the starting value, was found if field.REAL: - next_date = self._set_field_value(next_date, fieldnum, - next_value) + next_date = self._set_field_value(next_date, fieldnum, next_value) fieldnum += 1 else: - next_date, fieldnum = self._increment_field_value(next_date, - fieldnum) + next_date, fieldnum = self._increment_field_value(next_date, fieldnum) else: # A valid value was found, no changes necessary fieldnum += 1 + # Return if the date has rolled past the end date + if self.end_date and next_date > self.end_date: + return None + if fieldnum >= 0: return next_date + def __getstate__(self): + return { + 'version': 1, + 'timezone': self.timezone, + 'start_date': self.start_date, + 'end_date': self.end_date, + 'fields': self.fields + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.timezone = state['timezone'] + self.start_date = state['start_date'] + self.end_date = state['end_date'] + self.fields = state['fields'] + def __str__(self): - options = ["%s='%s'" % (f.name, str(f)) for f in self.fields - if not f.is_default] + options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default] return 'cron[%s]' % (', '.join(options)) def __repr__(self): - options = ["%s='%s'" % (f.name, str(f)) for f in self.fields - if not f.is_default] + options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default] if self.start_date: - options.append("start_date='%s'" % self.start_date.isoformat(' ')) - return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options)) + options.append("start_date='%s'" % datetime_repr(self.start_date)) + return "<%s (%s, timezone='%s')>" % ( + self.__class__.__name__, ', '.join(options), self.timezone) diff --git a/lib/apscheduler/triggers/cron/expressions.py b/lib/apscheduler/triggers/cron/expressions.py index 018c7a30..21493d54 100644 --- a/lib/apscheduler/triggers/cron/expressions.py +++ b/lib/apscheduler/triggers/cron/expressions.py @@ -1,6 +1,4 @@ -""" -This module contains the expressions applicable for CronTrigger's fields. 
-""" +"""This module contains the expressions applicable for CronTrigger's fields.""" from calendar import monthrange import re @@ -8,7 +6,7 @@ import re from apscheduler.util import asint __all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression', - 'WeekdayPositionExpression') + 'WeekdayPositionExpression', 'LastDayOfMonthExpression') WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] @@ -37,6 +35,9 @@ class AllExpression(object): if next <= maxval: return next + def __eq__(self, other): + return isinstance(other, self.__class__) and self.step == other.step + def __str__(self): if self.step: return '*/%d' % self.step @@ -57,30 +58,30 @@ class RangeExpression(AllExpression): if last is None and step is None: last = first if last is not None and first > last: - raise ValueError('The minimum value in a range must not be ' - 'higher than the maximum') + raise ValueError('The minimum value in a range must not be higher than the maximum') self.first = first self.last = last def get_next_value(self, date, field): - start = field.get_value(date) + startval = field.get_value(date) minval = field.get_min(date) maxval = field.get_max(date) # Apply range limits minval = max(minval, self.first) - if self.last is not None: - maxval = min(maxval, self.last) - start = max(start, minval) + maxval = min(maxval, self.last) if self.last is not None else maxval + nextval = max(minval, startval) - if not self.step: - next = start - else: - distance_to_next = (self.step - (start - minval)) % self.step - next = start + distance_to_next + # Apply the step if defined + if self.step: + distance_to_next = (self.step - (nextval - minval)) % self.step + nextval += distance_to_next - if next <= maxval: - return next + return nextval if nextval <= maxval else None + + def __eq__(self, other): + return (isinstance(other, self.__class__) and self.first == other.first and + self.last == other.last) def __str__(self): if self.last != self.first and self.last is not None: @@ -102,8 +103,7 @@ class RangeExpression(AllExpression): class WeekdayRangeExpression(RangeExpression): - value_re = re.compile(r'(?P[a-z]+)(?:-(?P[a-z]+))?', - re.IGNORECASE) + value_re = re.compile(r'(?P[a-z]+)(?:-(?P[a-z]+))?', re.IGNORECASE) def __init__(self, first, last=None): try: @@ -135,8 +135,8 @@ class WeekdayRangeExpression(RangeExpression): class WeekdayPositionExpression(AllExpression): options = ['1st', '2nd', '3rd', '4th', '5th', 'last'] - value_re = re.compile(r'(?P%s) +(?P(?:\d+|\w+))' - % '|'.join(options), re.IGNORECASE) + value_re = re.compile(r'(?P%s) +(?P(?:\d+|\w+))' % + '|'.join(options), re.IGNORECASE) def __init__(self, option_name, weekday_name): try: @@ -150,8 +150,7 @@ class WeekdayPositionExpression(AllExpression): raise ValueError('Invalid weekday name "%s"' % weekday_name) def get_next_value(self, date, field): - # Figure out the weekday of the month's first day and the number - # of days in that month + # Figure out the weekday of the month's first day and the number of days in that month first_day_wday, last_day = monthrange(date.year, date.month) # Calculate which day of the month is the first of the target weekdays @@ -163,16 +162,34 @@ class WeekdayPositionExpression(AllExpression): if self.option_num < 5: target_day = first_hit_day + self.option_num * 7 else: - target_day = first_hit_day + ((last_day - first_hit_day) / 7) * 7 + target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7 if target_day <= last_day and target_day >= date.day: return target_day + def __eq__(self, other): + 
return (super(WeekdayPositionExpression, self).__eq__(other) and
+                self.option_num == other.option_num and self.weekday == other.weekday)
+
     def __str__(self):
-        return '%s %s' % (self.options[self.option_num],
-                          WEEKDAYS[self.weekday])
+        return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday])
 
     def __repr__(self):
-        return "%s('%s', '%s')" % (self.__class__.__name__,
-                                   self.options[self.option_num],
+        return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num],
                                    WEEKDAYS[self.weekday])
+
+
+class LastDayOfMonthExpression(AllExpression):
+    value_re = re.compile(r'last', re.IGNORECASE)
+
+    def __init__(self):
+        pass
+
+    def get_next_value(self, date, field):
+        return monthrange(date.year, date.month)[1]
+
+    def __str__(self):
+        return 'last'
+
+    def __repr__(self):
+        return "%s()" % self.__class__.__name__
diff --git a/lib/apscheduler/triggers/cron/fields.py b/lib/apscheduler/triggers/cron/fields.py
index ef970cc9..892bc13f 100644
--- a/lib/apscheduler/triggers/cron/fields.py
+++ b/lib/apscheduler/triggers/cron/fields.py
@@ -1,22 +1,22 @@
-"""
-Fields represent CronTrigger options which map to :class:`~datetime.datetime`
-fields.
-"""
+"""Fields represent CronTrigger options which map to :class:`~datetime.datetime` fields."""
 
 from calendar import monthrange
 
-from apscheduler.triggers.cron.expressions import *
-
-__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField',
-           'WeekField', 'DayOfMonthField', 'DayOfWeekField')
+from apscheduler.triggers.cron.expressions import (
+    AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression,
+    WeekdayRangeExpression)
 
-MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1,
-              'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0}
-MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53,
-              'day_of_week': 6, 'hour': 23, 'minute': 59, 'second': 59}
-DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*',
-                  'day_of_week': '*', 'hour': 0, 'minute': 0, 'second': 0}
+__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField',
+           'DayOfMonthField', 'DayOfWeekField')
+
+
+MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0,
+              'minute': 0, 'second': 0}
+MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day': 31, 'week': 53, 'day_of_week': 6, 'hour': 23,
+              'minute': 59, 'second': 59}
+DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0,
+                  'minute': 0, 'second': 0}
 
 class BaseField(object):
@@ -65,16 +65,17 @@ class BaseField(object):
             self.expressions.append(compiled_expr)
             return
 
-        raise ValueError('Unrecognized expression "%s" for field "%s"' %
-                         (expr, self.name))
+        raise ValueError('Unrecognized expression "%s" for field "%s"' % (expr, self.name))
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__) and self.expressions == other.expressions
 
     def __str__(self):
         expr_strings = (str(e) for e in self.expressions)
         return ','.join(expr_strings)
 
     def __repr__(self):
-        return "%s('%s', '%s')" % (self.__class__.__name__, self.name,
-                                   str(self))
+        return "%s('%s', '%s')" % (self.__class__.__name__, self.name, self)
 
 class WeekField(BaseField):
@@ -85,7 +86,7 @@ class WeekField(BaseField):
 
 class DayOfMonthField(BaseField):
-    COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression]
+    COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression, LastDayOfMonthExpression]
 
     def get_max(self, dateval):
         return monthrange(dateval.year, dateval.month)[1]
diff --git
a/lib/apscheduler/triggers/date.py b/lib/apscheduler/triggers/date.py new file mode 100644 index 00000000..07681008 --- /dev/null +++ b/lib/apscheduler/triggers/date.py @@ -0,0 +1,51 @@ +from datetime import datetime + +from tzlocal import get_localzone + +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import convert_to_datetime, datetime_repr, astimezone + + +class DateTrigger(BaseTrigger): + """ + Triggers once on the given datetime. If ``run_date`` is left empty, current time is used. + + :param datetime|str run_date: the date/time to run the job at + :param datetime.tzinfo|str timezone: time zone for ``run_date`` if it doesn't have one already + """ + + __slots__ = 'run_date' + + def __init__(self, run_date=None, timezone=None): + timezone = astimezone(timezone) or get_localzone() + if run_date is not None: + self.run_date = convert_to_datetime(run_date, timezone, 'run_date') + else: + self.run_date = datetime.now(timezone) + + def get_next_fire_time(self, previous_fire_time, now): + return self.run_date if previous_fire_time is None else None + + def __getstate__(self): + return { + 'version': 1, + 'run_date': self.run_date + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.run_date = state['run_date'] + + def __str__(self): + return 'date[%s]' % datetime_repr(self.run_date) + + def __repr__(self): + return "<%s (run_date='%s')>" % (self.__class__.__name__, datetime_repr(self.run_date)) diff --git a/lib/apscheduler/triggers/interval.py b/lib/apscheduler/triggers/interval.py index dd16d777..fec912a2 100644 --- a/lib/apscheduler/triggers/interval.py +++ b/lib/apscheduler/triggers/interval.py @@ -1,39 +1,92 @@ -from datetime import datetime, timedelta +from datetime import timedelta, datetime from math import ceil -from apscheduler.util import convert_to_datetime, timedelta_seconds +from tzlocal import get_localzone + +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import convert_to_datetime, timedelta_seconds, datetime_repr, astimezone -class IntervalTrigger(object): - def __init__(self, interval, start_date=None): - if not isinstance(interval, timedelta): - raise TypeError('interval must be a timedelta') - if start_date: - start_date = convert_to_datetime(start_date) +class IntervalTrigger(BaseTrigger): + """ + Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` + + interval otherwise. 
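For illustration (hypothetical ``scheduler`` and ``tick`` again), the new DateTrigger above and the IntervalTrigger below, driven through their aliases:

    from datetime import datetime

    # DateTrigger: fires exactly once, then get_next_fire_time() returns None
    scheduler.add_job(tick, 'date', run_date=datetime(2017, 6, 1, 8, 0))

    # IntervalTrigger: first fire defaults to now + interval
    scheduler.add_job(tick, 'interval', hours=2)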
- self.interval = interval + :param int weeks: number of weeks to wait + :param int days: number of days to wait + :param int hours: number of hours to wait + :param int minutes: number of minutes to wait + :param int seconds: number of seconds to wait + :param datetime|str start_date: starting point for the interval calculation + :param datetime|str end_date: latest possible date/time to trigger on + :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations + """ + + __slots__ = 'timezone', 'start_date', 'end_date', 'interval', 'interval_length' + + def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None, + end_date=None, timezone=None): + self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, + seconds=seconds) self.interval_length = timedelta_seconds(self.interval) if self.interval_length == 0: self.interval = timedelta(seconds=1) self.interval_length = 1 - if start_date is None: - self.start_date = datetime.now() + self.interval + if timezone: + self.timezone = astimezone(timezone) + elif isinstance(start_date, datetime) and start_date.tzinfo: + self.timezone = start_date.tzinfo + elif isinstance(end_date, datetime) and end_date.tzinfo: + self.timezone = end_date.tzinfo else: - self.start_date = convert_to_datetime(start_date) + self.timezone = get_localzone() - def get_next_fire_time(self, start_date): - if start_date < self.start_date: - return self.start_date + start_date = start_date or (datetime.now(self.timezone) + self.interval) + self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date') + self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date') - timediff_seconds = timedelta_seconds(start_date - self.start_date) - next_interval_num = int(ceil(timediff_seconds / self.interval_length)) - return self.start_date + self.interval * next_interval_num + def get_next_fire_time(self, previous_fire_time, now): + if previous_fire_time: + next_fire_time = previous_fire_time + self.interval + elif self.start_date > now: + next_fire_time = self.start_date + else: + timediff_seconds = timedelta_seconds(now - self.start_date) + next_interval_num = int(ceil(timediff_seconds / self.interval_length)) + next_fire_time = self.start_date + self.interval * next_interval_num + + if not self.end_date or next_fire_time <= self.end_date: + return self.timezone.normalize(next_fire_time) + + def __getstate__(self): + return { + 'version': 1, + 'timezone': self.timezone, + 'start_date': self.start_date, + 'end_date': self.end_date, + 'interval': self.interval + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.timezone = state['timezone'] + self.start_date = state['start_date'] + self.end_date = state['end_date'] + self.interval = state['interval'] + self.interval_length = timedelta_seconds(self.interval) def __str__(self): return 'interval[%s]' % str(self.interval) def __repr__(self): - return "<%s (interval=%s, start_date=%s)>" % ( - self.__class__.__name__, repr(self.interval), - repr(self.start_date)) + return "<%s (interval=%r, start_date='%s', timezone='%s')>" % ( + self.__class__.__name__, self.interval, datetime_repr(self.start_date), self.timezone) diff --git a/lib/apscheduler/triggers/simple.py 
b/lib/apscheduler/triggers/simple.py
deleted file mode 100644
index ea61b3f1..00000000
--- a/lib/apscheduler/triggers/simple.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from apscheduler.util import convert_to_datetime
-
-
-class SimpleTrigger(object):
-    def __init__(self, run_date):
-        self.run_date = convert_to_datetime(run_date)
-
-    def get_next_fire_time(self, start_date):
-        if self.run_date >= start_date:
-            return self.run_date
-
-    def __str__(self):
-        return 'date[%s]' % str(self.run_date)
-
-    def __repr__(self):
-        return '<%s (run_date=%s)>' % (
-            self.__class__.__name__, repr(self.run_date))
diff --git a/lib/apscheduler/util.py b/lib/apscheduler/util.py
index a49aaed8..63ac8ac8 100644
--- a/lib/apscheduler/util.py
+++ b/lib/apscheduler/util.py
@@ -1,26 +1,50 @@
-"""
-This module contains several handy functions primarily meant for internal use.
-"""
+"""This module contains several handy functions primarily meant for internal use."""
 
-from datetime import date, datetime, timedelta
-from time import mktime
+from __future__ import division
+from datetime import date, datetime, time, timedelta, tzinfo
+from calendar import timegm
 import re
-import sys
-from types import MethodType
+from functools import partial
 
-__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds',
-           'time_difference', 'datetime_ceil', 'combine_opts',
-           'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref',
-           'to_unicode', 'iteritems', 'itervalues', 'xrange')
+from pytz import timezone, utc
+import six
+
+try:
+    from inspect import signature
+except ImportError:  # pragma: nocover
+    from funcsigs import signature
+
+try:
+    from threading import TIMEOUT_MAX
+except ImportError:
+    TIMEOUT_MAX = 4294967  # Maximum value accepted by Event.wait() on Windows
+
+__all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp',
+           'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name',
+           'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args')
+
+
+class _Undefined(object):
+    def __nonzero__(self):
+        return False
+
+    def __bool__(self):
+        return False
+
+    def __repr__(self):
+        return '<undefined>'
+
+
+undefined = _Undefined()  #: a unique object that only signifies that no value is defined
 
 def asint(text):
     """
-    Safely converts a string to an integer, returning None if the string
-    is None.
+    Safely converts a string to an integer, returning ``None`` if the string is ``None``.
 
     :type text: str
     :rtype: int
+
     """
     if text is not None:
         return int(text)
@@ -31,6 +55,7 @@ def asbool(obj):
     """
     Interprets an object as a boolean value.
 
     :rtype: bool
+
     """
     if isinstance(obj, str):
         obj = obj.strip().lower()
@@ -42,36 +67,105 @@
     return bool(obj)
 
+def astimezone(obj):
+    """
+    Interprets an object as a timezone.
+
+    :rtype: tzinfo
+
+    """
+    if isinstance(obj, six.string_types):
+        return timezone(obj)
+    if isinstance(obj, tzinfo):
+        if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
+            raise TypeError('Only timezones from the pytz library are supported')
+        if obj.zone == 'local':
+            raise ValueError(
+                'Unable to determine the name of the local timezone -- you must explicitly '
+                'specify the name of the local timezone. Please refrain from using timezones like '
+                'EST to prevent problems with daylight saving time. Instead, use a locale based '
+                'timezone name (such as Europe/Helsinki).')
+        return obj
+    if obj is not None:
+        raise TypeError('Expected tzinfo, got %s instead' % obj.__class__.__name__)
+
+
 _DATE_REGEX = re.compile(
     r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
     r'(?: (?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})'
     r'(?:\.(?P<microsecond>\d{1,6}))?)?')
 
-def convert_to_datetime(input):
+def convert_to_datetime(input, tz, arg_name):
     """
-    Converts the given object to a datetime object, if possible.
-    If an actual datetime object is passed, it is returned unmodified.
-    If the input is a string, it is parsed as a datetime.
+    Converts the given object to a timezone aware datetime object.
 
-    Date strings are accepted in three different forms: date only (Y-m-d),
-    date with time (Y-m-d H:M:S) or with date+time with microseconds
-    (Y-m-d H:M:S.micro).
+    If a timezone aware datetime object is passed, it is returned unmodified.
+    If a naive datetime object is passed, it is given the specified timezone.
+    If the input is a string, it is parsed as a datetime with the given timezone.
+    Date strings are accepted in three different forms: date only (Y-m-d), date with time
+    (Y-m-d H:M:S) or with date+time with microseconds (Y-m-d H:M:S.micro).
+
+    :param str|datetime input: the datetime or string to convert to a timezone aware datetime
+    :param datetime.tzinfo tz: timezone to interpret ``input`` in
+    :param str arg_name: the name of the argument (used in an error message)
     :rtype: datetime
+
     """
-    if isinstance(input, datetime):
-        return input
+    if input is None:
+        return
+    elif isinstance(input, datetime):
+        datetime_ = input
     elif isinstance(input, date):
-        return datetime.fromordinal(input.toordinal())
-    elif isinstance(input, str):
+        datetime_ = datetime.combine(input, time())
+    elif isinstance(input, six.string_types):
         m = _DATE_REGEX.match(input)
         if not m:
             raise ValueError('Invalid date string')
         values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
         values = dict(values)
-        return datetime(**values)
-    raise TypeError('Unsupported input type: %s' % type(input))
+        datetime_ = datetime(**values)
+    else:
+        raise TypeError('Unsupported type for %s: %s' % (arg_name, input.__class__.__name__))
+
+    if datetime_.tzinfo is not None:
+        return datetime_
+    if tz is None:
+        raise ValueError(
+            'The "tz" argument must be specified if %s has no timezone information' % arg_name)
+    if isinstance(tz, six.string_types):
+        tz = timezone(tz)
+
+    try:
+        return tz.localize(datetime_, is_dst=None)
+    except AttributeError:
+        raise TypeError(
+            'Only pytz timezones are supported (need the localize() and normalize() methods)')
+
+
+def datetime_to_utc_timestamp(timeval):
+    """
+    Converts a datetime instance to a timestamp.
+
+    :type timeval: datetime
+    :rtype: float
+
+    """
+    if timeval is not None:
+        return timegm(timeval.utctimetuple()) + timeval.microsecond / 1000000
+
+
+def utc_timestamp_to_datetime(timestamp):
+    """
+    Converts the given timestamp to a datetime instance.
+
+    :type timestamp: float
+    :rtype: datetime
+
+    """
+    if timestamp is not None:
+        return datetime.fromtimestamp(timestamp, utc)
 
 def timedelta_seconds(delta):
@@ -80,151 +174,212 @@
     :type delta: timedelta
     :rtype: float
+
     """
     return delta.days * 24 * 60 * 60 + delta.seconds + \
         delta.microseconds / 1000000.0
 
-def time_difference(date1, date2):
-    """
-    Returns the time difference in seconds between the given two
-    datetime objects. The difference is calculated as: date1 - date2.
-
-    :param date1: the later datetime
-    :type date1: datetime
-    :param date2: the earlier datetime
-    :type date2: datetime
-    :rtype: float
-    """
-    later = mktime(date1.timetuple()) + date1.microsecond / 1000000.0
-    earlier = mktime(date2.timetuple()) + date2.microsecond / 1000000.0
-    return later - earlier
-
-
 def datetime_ceil(dateval):
     """
     Rounds the given datetime object upwards.
 
     :type dateval: datetime
+
     """
     if dateval.microsecond > 0:
-        return dateval + timedelta(seconds=1,
-                                   microseconds= -dateval.microsecond)
+        return dateval + timedelta(seconds=1, microseconds=-dateval.microsecond)
     return dateval
 
-def combine_opts(global_config, prefix, local_config={}):
-    """
-    Returns a subdictionary from keys and values of ``global_config`` where
-    the key starts with the given prefix, combined with options from
-    local_config. The keys in the subdictionary have the prefix removed.
-
-    :type global_config: dict
-    :type prefix: str
-    :type local_config: dict
-    :rtype: dict
-    """
-    prefixlen = len(prefix)
-    subconf = {}
-    for key, value in global_config.items():
-        if key.startswith(prefix):
-            key = key[prefixlen:]
-            subconf[key] = value
-    subconf.update(local_config)
-    return subconf
+def datetime_repr(dateval):
+    return dateval.strftime('%Y-%m-%d %H:%M:%S %Z') if dateval else 'None'
 
 def get_callable_name(func):
     """
     Returns the best available display name for the given function/callable.
+
+    :rtype: str
+
     """
+    # the easy case (on Python 3.3+)
+    if hasattr(func, '__qualname__'):
+        return func.__qualname__
+
+    # class methods, bound and unbound methods
     f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None)
-    if f_self and hasattr(func, '__name__'):
-        if isinstance(f_self, type):
-            # class method
-            return '%s.%s' % (f_self.__name__, func.__name__)
-        # bound method
-        return '%s.%s' % (f_self.__class__.__name__, func.__name__)
+    if f_self and hasattr(func, '__name__'):
+        f_class = f_self if isinstance(f_self, type) else f_self.__class__
+    else:
+        f_class = getattr(func, 'im_class', None)
+
+    if f_class and hasattr(func, '__name__'):
+        return '%s.%s' % (f_class.__name__, func.__name__)
 
+    # class or class instance
     if hasattr(func, '__call__'):
+        # class
        if hasattr(func, '__name__'):
-            # function, unbound method or a class with a __call__ method
             return func.__name__
+
+        # instance of a class with a __call__ method
         return func.__class__.__name__
 
-    raise TypeError('Unable to determine a name for %s -- '
-                    'maybe it is not a callable?' % repr(func))
+    raise TypeError('Unable to determine a name for %r -- maybe it is not a callable?' % func)
 
 def obj_to_ref(obj):
     """
-    Returns the path to the given object.
+    Returns the path to the given callable.
+
+    :rtype: str
+    :raises TypeError: if the given object is not callable
+    :raises ValueError: if the given object is a :class:`~functools.partial`, lambda or a nested
+        function
+
     """
-    ref = '%s:%s' % (obj.__module__, get_callable_name(obj))
-    try:
-        obj2 = ref_to_obj(ref)
-        if obj != obj2:
-            raise ValueError
-    except Exception:
-        raise ValueError('Cannot determine the reference to %s' % repr(obj))
-
-    return ref
+    if isinstance(obj, partial):
+        raise ValueError('Cannot create a reference to a partial()')
+
+    name = get_callable_name(obj)
+    if '<lambda>' in name:
+        raise ValueError('Cannot create a reference to a lambda')
+    if '<locals>' in name:
+        raise ValueError('Cannot create a reference to a nested function')
+
+    return '%s:%s' % (obj.__module__, name)
 
 def ref_to_obj(ref):
     """
     Returns the object pointed to by ``ref``.
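To make the reference format concrete (an editor's sketch; ``myapp.jobs`` is a hypothetical module, ``tick`` the hypothetical job from earlier):

    from apscheduler.util import obj_to_ref, ref_to_obj

    ref = obj_to_ref(tick)     # e.g. 'myapp.jobs:tick' (module:qualified name)
    func = ref_to_obj(ref)     # imports the module, then walks the attribute path
    assert func is tick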
+ + :type ref: str + """ - if not isinstance(ref, basestring): + if not isinstance(ref, six.string_types): raise TypeError('References must be strings') - if not ':' in ref: + if ':' not in ref: raise ValueError('Invalid reference') modulename, rest = ref.split(':', 1) try: - obj = __import__(modulename) + obj = __import__(modulename, fromlist=[rest]) except ImportError: - raise LookupError('Error resolving reference %s: ' - 'could not import module' % ref) + raise LookupError('Error resolving reference %s: could not import module' % ref) try: - for name in modulename.split('.')[1:] + rest.split('.'): + for name in rest.split('.'): obj = getattr(obj, name) return obj except Exception: - raise LookupError('Error resolving reference %s: ' - 'error looking up object' % ref) + raise LookupError('Error resolving reference %s: error looking up object' % ref) def maybe_ref(ref): """ - Returns the object that the given reference points to, if it is indeed - a reference. If it is not a reference, the object is returned as-is. + Returns the object that the given reference points to, if it is indeed a reference. + If it is not a reference, the object is returned as-is. + """ if not isinstance(ref, str): return ref return ref_to_obj(ref) -def to_unicode(string, encoding='ascii'): - """ - Safely converts a string to a unicode representation on any - Python version. - """ - if hasattr(string, 'decode'): - return string.decode(encoding, 'ignore') - return string # pragma: nocover +if six.PY2: + def repr_escape(string): + if isinstance(string, six.text_type): + return string.encode('ascii', 'backslashreplace') + return string +else: + def repr_escape(string): + return string -if sys.version_info < (3, 0): # pragma: nocover - iteritems = lambda d: d.iteritems() - itervalues = lambda d: d.itervalues() - xrange = xrange - basestring = basestring -else: # pragma: nocover - iteritems = lambda d: d.items() - itervalues = lambda d: d.values() - xrange = range - basestring = str +def check_callable_args(func, args, kwargs): + """ + Ensures that the given callable can be called with the given arguments. 
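# Round-trip sketch for the reference helpers above (assumes the vendored
# package is importable as 'apscheduler', which is how Mylar loads it from lib/):
import logging
from apscheduler.util import obj_to_ref, ref_to_obj

ref = obj_to_ref(logging.getLogger)  # -> 'logging:getLogger'
assert ref_to_obj(ref) is logging.getLogger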
+ + :type args: tuple + :type kwargs: dict + + """ + pos_kwargs_conflicts = [] # parameters that have a match in both args and kwargs + positional_only_kwargs = [] # positional-only parameters that have a match in kwargs + unsatisfied_args = [] # parameters in signature that don't have a match in args or kwargs + unsatisfied_kwargs = [] # keyword-only arguments that don't have a match in kwargs + unmatched_args = list(args) # args that didn't match any of the parameters in the signature + # kwargs that didn't match any of the parameters in the signature + unmatched_kwargs = list(kwargs) + # indicates if the signature defines *args and **kwargs respectively + has_varargs = has_var_kwargs = False + + try: + sig = signature(func) + except ValueError: + # signature() doesn't work against every kind of callable + return + + for param in six.itervalues(sig.parameters): + if param.kind == param.POSITIONAL_OR_KEYWORD: + if param.name in unmatched_kwargs and unmatched_args: + pos_kwargs_conflicts.append(param.name) + elif unmatched_args: + del unmatched_args[0] + elif param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + elif param.default is param.empty: + unsatisfied_args.append(param.name) + elif param.kind == param.POSITIONAL_ONLY: + if unmatched_args: + del unmatched_args[0] + elif param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + positional_only_kwargs.append(param.name) + elif param.default is param.empty: + unsatisfied_args.append(param.name) + elif param.kind == param.KEYWORD_ONLY: + if param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + elif param.default is param.empty: + unsatisfied_kwargs.append(param.name) + elif param.kind == param.VAR_POSITIONAL: + has_varargs = True + elif param.kind == param.VAR_KEYWORD: + has_var_kwargs = True + + # Make sure there are no conflicts between args and kwargs + if pos_kwargs_conflicts: + raise ValueError('The following arguments are supplied in both args and kwargs: %s' % + ', '.join(pos_kwargs_conflicts)) + + # Check if keyword arguments are being fed to positional-only parameters + if positional_only_kwargs: + raise ValueError('The following arguments cannot be given as keyword arguments: %s' % + ', '.join(positional_only_kwargs)) + + # Check that the number of positional arguments minus the number of matched kwargs matches the + # argspec + if unsatisfied_args: + raise ValueError('The following arguments have not been supplied: %s' % + ', '.join(unsatisfied_args)) + + # Check that all keyword-only arguments have been supplied + if unsatisfied_kwargs: + raise ValueError( + 'The following keyword-only arguments have not been supplied in kwargs: %s' % + ', '.join(unsatisfied_kwargs)) + + # Check that the callable can accept the given number of positional arguments + if not has_varargs and unmatched_args: + raise ValueError( + 'The list of positional arguments is longer than the target callable can handle ' + '(allowed: %d, given in args: %d)' % (len(args) - len(unmatched_args), len(args))) + + # Check that the callable can accept the given keyword arguments + if not has_var_kwargs and unmatched_kwargs: + raise ValueError( + 'The target callable does not accept the following keyword arguments: %s' % + ', '.join(unmatched_kwargs)) diff --git a/lib/concurrent/LICENSE b/lib/concurrent/LICENSE new file mode 100644 index 00000000..a8d65b16 --- /dev/null +++ b/lib/concurrent/LICENSE @@ -0,0 +1,48 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
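The vendored backport exposes the same public API as Python 3's concurrent.futures, so caller code can be written against the standard interface; a minimal sketch (the worker function and pool size here are illustrative):

from concurrent.futures import ThreadPoolExecutor, as_completed

def square(n):  # stand-in for real work such as a network fetch
    return n * n

with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(square, n) for n in range(5)]
    for future in as_completed(futures):
        print(future.result())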
diff --git a/lib/concurrent/PKG-INFO b/lib/concurrent/PKG-INFO new file mode 100644 index 00000000..50dd8f09 --- /dev/null +++ b/lib/concurrent/PKG-INFO @@ -0,0 +1,16 @@ +Metadata-Version: 1.1 +Name: futures +Version: 3.1.1 +Summary: Backport of the concurrent.futures package from Python 3.2 +Home-page: https://github.com/agronholm/pythonfutures +Author: Alex Gronholm +Author-email: alex.gronholm+pypi@nextday.fi +License: PSF +Description: UNKNOWN +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 2 :: Only diff --git a/lib/concurrent/__init__.py b/lib/concurrent/__init__.py new file mode 100644 index 00000000..b36383a6 --- /dev/null +++ b/lib/concurrent/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/lib/concurrent/futures/__init__.py b/lib/concurrent/futures/__init__.py new file mode 100644 index 00000000..428b14bd --- /dev/null +++ b/lib/concurrent/futures/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Execute computations asynchronously using threads or processes.""" + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +from concurrent.futures._base import (FIRST_COMPLETED, + FIRST_EXCEPTION, + ALL_COMPLETED, + CancelledError, + TimeoutError, + Future, + Executor, + wait, + as_completed) +from concurrent.futures.thread import ThreadPoolExecutor + +try: + from concurrent.futures.process import ProcessPoolExecutor +except ImportError: + # some platforms don't have multiprocessing + pass diff --git a/lib/concurrent/futures/_base.py b/lib/concurrent/futures/_base.py new file mode 100644 index 00000000..ca2ebfb0 --- /dev/null +++ b/lib/concurrent/futures/_base.py @@ -0,0 +1,631 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +import collections +import logging +import threading +import itertools +import time +import types + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +FIRST_COMPLETED = 'FIRST_COMPLETED' +FIRST_EXCEPTION = 'FIRST_EXCEPTION' +ALL_COMPLETED = 'ALL_COMPLETED' +_AS_COMPLETED = '_AS_COMPLETED' + +# Possible future states (for internal use by the futures package). +PENDING = 'PENDING' +RUNNING = 'RUNNING' +# The future was cancelled by the user... +CANCELLED = 'CANCELLED' +# ...and _Waiter.add_cancelled() was called by a worker. +CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED' +FINISHED = 'FINISHED' + +_FUTURE_STATES = [ + PENDING, + RUNNING, + CANCELLED, + CANCELLED_AND_NOTIFIED, + FINISHED +] + +_STATE_TO_DESCRIPTION_MAP = { + PENDING: "pending", + RUNNING: "running", + CANCELLED: "cancelled", + CANCELLED_AND_NOTIFIED: "cancelled", + FINISHED: "finished" +} + +# Logger for internal use by the futures package. 
+LOGGER = logging.getLogger("concurrent.futures") + +class Error(Exception): + """Base class for all future-related exceptions.""" + pass + +class CancelledError(Error): + """The Future was cancelled.""" + pass + +class TimeoutError(Error): + """The operation exceeded the given deadline.""" + pass + +class _Waiter(object): + """Provides the event that wait() and as_completed() block on.""" + def __init__(self): + self.event = threading.Event() + self.finished_futures = [] + + def add_result(self, future): + self.finished_futures.append(future) + + def add_exception(self, future): + self.finished_futures.append(future) + + def add_cancelled(self, future): + self.finished_futures.append(future) + +class _AsCompletedWaiter(_Waiter): + """Used by as_completed().""" + + def __init__(self): + super(_AsCompletedWaiter, self).__init__() + self.lock = threading.Lock() + + def add_result(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_cancelled(future) + self.event.set() + +class _FirstCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_COMPLETED).""" + + def add_result(self, future): + super(_FirstCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + super(_FirstCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + super(_FirstCompletedWaiter, self).add_cancelled(future) + self.event.set() + +class _AllCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" + + def __init__(self, num_pending_calls, stop_on_exception): + self.num_pending_calls = num_pending_calls + self.stop_on_exception = stop_on_exception + self.lock = threading.Lock() + super(_AllCompletedWaiter, self).__init__() + + def _decrement_pending_calls(self): + with self.lock: + self.num_pending_calls -= 1 + if not self.num_pending_calls: + self.event.set() + + def add_result(self, future): + super(_AllCompletedWaiter, self).add_result(future) + self._decrement_pending_calls() + + def add_exception(self, future): + super(_AllCompletedWaiter, self).add_exception(future) + if self.stop_on_exception: + self.event.set() + else: + self._decrement_pending_calls() + + def add_cancelled(self, future): + super(_AllCompletedWaiter, self).add_cancelled(future) + self._decrement_pending_calls() + +class _AcquireFutures(object): + """A context manager that does an ordered acquire of Future conditions.""" + + def __init__(self, futures): + self.futures = sorted(futures, key=id) + + def __enter__(self): + for future in self.futures: + future._condition.acquire() + + def __exit__(self, *args): + for future in self.futures: + future._condition.release() + +def _create_and_install_waiters(fs, return_when): + if return_when == _AS_COMPLETED: + waiter = _AsCompletedWaiter() + elif return_when == FIRST_COMPLETED: + waiter = _FirstCompletedWaiter() + else: + pending_count = sum( + f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs) + + if return_when == FIRST_EXCEPTION: + waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True) + elif return_when == ALL_COMPLETED: + waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False) + else: + raise ValueError("Invalid return condition: %r" % return_when) + + for f in 
fs: + f._waiters.append(waiter) + + return waiter + +def as_completed(fs, timeout=None): + """An iterator over the given futures that yields each as it completes. + + Args: + fs: The sequence of Futures (possibly created by different Executors) to + iterate over. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + + Returns: + An iterator that yields the given Futures as they complete (finished or + cancelled). If any given Futures are duplicated, they will be returned + once. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + """ + if timeout is not None: + end_time = timeout + time.time() + + fs = set(fs) + with _AcquireFutures(fs): + finished = set( + f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + pending = fs - finished + waiter = _create_and_install_waiters(fs, _AS_COMPLETED) + + try: + for future in finished: + yield future + + while pending: + if timeout is None: + wait_timeout = None + else: + wait_timeout = end_time - time.time() + if wait_timeout < 0: + raise TimeoutError( + '%d (of %d) futures unfinished' % ( + len(pending), len(fs))) + + waiter.event.wait(wait_timeout) + + with waiter.lock: + finished = waiter.finished_futures + waiter.finished_futures = [] + waiter.event.clear() + + for future in finished: + yield future + pending.remove(future) + + finally: + for f in fs: + with f._condition: + f._waiters.remove(waiter) + +DoneAndNotDoneFutures = collections.namedtuple( + 'DoneAndNotDoneFutures', 'done not_done') +def wait(fs, timeout=None, return_when=ALL_COMPLETED): + """Wait for the futures in the given sequence to complete. + + Args: + fs: The sequence of Futures (possibly created by different Executors) to + wait upon. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + return_when: Indicates when this function should return. The options + are: + + FIRST_COMPLETED - Return when any future finishes or is + cancelled. + FIRST_EXCEPTION - Return when any future finishes by raising an + exception. If no future raises an exception + then it is equivalent to ALL_COMPLETED. + ALL_COMPLETED - Return when all futures finish or are cancelled. + + Returns: + A named 2-tuple of sets. The first set, named 'done', contains the + futures that completed (is finished or cancelled) before the wait + completed. The second set, named 'not_done', contains uncompleted + futures. + """ + with _AcquireFutures(fs): + done = set(f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + not_done = set(fs) - done + + if (return_when == FIRST_COMPLETED) and done: + return DoneAndNotDoneFutures(done, not_done) + elif (return_when == FIRST_EXCEPTION) and done: + if any(f for f in done + if not f.cancelled() and f.exception() is not None): + return DoneAndNotDoneFutures(done, not_done) + + if len(done) == len(fs): + return DoneAndNotDoneFutures(done, not_done) + + waiter = _create_and_install_waiters(fs, return_when) + + waiter.event.wait(timeout) + for f in fs: + with f._condition: + f._waiters.remove(waiter) + + done.update(waiter.finished_futures) + return DoneAndNotDoneFutures(done, set(fs) - done) + +class Future(object): + """Represents the result of an asynchronous computation.""" + + def __init__(self): + """Initializes the future. 
Should not be called by clients.""" + self._condition = threading.Condition() + self._state = PENDING + self._result = None + self._exception = None + self._traceback = None + self._waiters = [] + self._done_callbacks = [] + + def _invoke_callbacks(self): + for callback in self._done_callbacks: + try: + callback(self) + except Exception: + LOGGER.exception('exception calling callback for %r', self) + except BaseException: + # Explicitly let all other new-style exceptions through so + # that we can catch all old-style exceptions with a simple + # "except:" clause below. + # + # All old-style exception objects are instances of + # types.InstanceType, but "except types.InstanceType:" does + # not catch old-style exceptions for some reason. Thus, the + # only way to catch all old-style exceptions without catching + # any new-style exceptions is to filter out the new-style + # exceptions, which all derive from BaseException. + raise + except: + # Because of the BaseException clause above, this handler only + # executes for old-style exception objects. + LOGGER.exception('exception calling callback for %r', self) + + def __repr__(self): + with self._condition: + if self._state == FINISHED: + if self._exception: + return '<Future at %s state=%s raised %s>' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._exception.__class__.__name__) + else: + return '<Future at %s state=%s returned %s>' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._result.__class__.__name__) + return '<Future at %s state=%s>' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state]) + + def cancel(self): + """Cancel the future if possible. + + Returns True if the future was cancelled, False otherwise. A future + cannot be cancelled if it is running or has already completed. + """ + with self._condition: + if self._state in [RUNNING, FINISHED]: + return False + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + return True + + self._state = CANCELLED + self._condition.notify_all() + + self._invoke_callbacks() + return True + + def cancelled(self): + """Return True if the future was cancelled.""" + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED] + + def running(self): + """Return True if the future is currently executing.""" + with self._condition: + return self._state == RUNNING + + def done(self): + """Return True if the future was cancelled or finished executing.""" + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED] + + def __get_result(self): + if self._exception: + if isinstance(self._exception, types.InstanceType): + # The exception is an instance of an old-style class, which + # means type(self._exception) returns types.ClassType instead + # of the exception's actual class type. + exception_type = self._exception.__class__ + else: + exception_type = type(self._exception) + raise exception_type, self._exception, self._traceback + else: + return self._result + + def add_done_callback(self, fn): + """Attaches a callable that will be called when the future finishes. + + Args: + fn: A callable that will be called with this future as its only + argument when the future completes or is cancelled. The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added.
+ """ + with self._condition: + if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]: + self._done_callbacks.append(fn) + return + fn(self) + + def result(self, timeout=None): + """Return the result of the call that the future represents. + + Args: + timeout: The number of seconds to wait for the result if the future + isn't done. If None, then there is no limit on the wait time. + + Returns: + The result of the call that the future represents. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + Exception: If the call raised then that exception will be raised. + """ + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + else: + raise TimeoutError() + + def exception_info(self, timeout=None): + """Return a tuple of (exception, traceback) raised by the call that the + future represents. + + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. + + Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + """ + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception, self._traceback + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception, self._traceback + else: + raise TimeoutError() + + def exception(self, timeout=None): + """Return the exception raised by the call that the future represents. + + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. + + Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + """ + return self.exception_info(timeout)[0] + + # The following methods should only be used by Executors and in tests. + def set_running_or_notify_cancel(self): + """Mark the future as running or process any cancel notifications. + + Should only be used by Executor implementations and unit tests. + + If the future has been cancelled (cancel() was called and returned + True) then any threads waiting on the future completing (though calls + to as_completed() or wait()) are notified and False is returned. + + If the future was not cancelled then it is put in the running state + (future calls to running() will return True) and True is returned. + + This method should be called by Executor implementations before + executing the work associated with this future. If this method returns + False then the work should not be executed. + + Returns: + False if the Future was cancelled, True otherwise. 
+ + Raises: + RuntimeError: if this method was already called or if set_result() + or set_exception() was called. + """ + with self._condition: + if self._state == CANCELLED: + self._state = CANCELLED_AND_NOTIFIED + for waiter in self._waiters: + waiter.add_cancelled(self) + # self._condition.notify_all() is not necessary because + # self.cancel() triggers a notification. + return False + elif self._state == PENDING: + self._state = RUNNING + return True + else: + LOGGER.critical('Future %s in unexpected state: %s', + id(self), + self._state) + raise RuntimeError('Future in unexpected state') + + def set_result(self, result): + """Sets the return value of work associated with the future. + + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._result = result + self._state = FINISHED + for waiter in self._waiters: + waiter.add_result(self) + self._condition.notify_all() + self._invoke_callbacks() + + def set_exception_info(self, exception, traceback): + """Sets the result of the future as being the given exception + and traceback. + + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._exception = exception + self._traceback = traceback + self._state = FINISHED + for waiter in self._waiters: + waiter.add_exception(self) + self._condition.notify_all() + self._invoke_callbacks() + + def set_exception(self, exception): + """Sets the result of the future as being the given exception. + + Should only be used by Executor implementations and unit tests. + """ + self.set_exception_info(exception, None) + +class Executor(object): + """This is an abstract base class for concrete asynchronous executors.""" + + def submit(self, fn, *args, **kwargs): + """Submits a callable to be executed with the given arguments. + + Schedules the callable to be executed as fn(*args, **kwargs) and returns + a Future instance representing the execution of the callable. + + Returns: + A Future representing the given call. + """ + raise NotImplementedError() + + def map(self, fn, *iterables, **kwargs): + """Returns an iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ + timeout = kwargs.get('timeout') + if timeout is not None: + end_time = timeout + time.time() + + fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)] + + # Yield must be hidden in closure so that the futures are submitted + # before the first iterator value is required. + def result_iterator(): + try: + for future in fs: + if timeout is None: + yield future.result() + else: + yield future.result(end_time - time.time()) + finally: + for future in fs: + future.cancel() + return result_iterator() + + def shutdown(self, wait=True): + """Clean-up the resources associated with the Executor. + + It is safe to call this method several times. Otherwise, no other + methods can be called after this one. + + Args: + wait: If True then shutdown will not return until all running + futures have finished executing and the resources used by the + executor have been reclaimed.
+ """ + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.shutdown(wait=True) + return False diff --git a/lib/concurrent/futures/process.py b/lib/concurrent/futures/process.py new file mode 100644 index 00000000..fa5b96fd --- /dev/null +++ b/lib/concurrent/futures/process.py @@ -0,0 +1,363 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ProcessPoolExecutor. + +The follow diagram and text describe the data-flow through the system: + +|======================= In-process =====================|== Out-of-process ==| + ++----------+ +----------+ +--------+ +-----------+ +---------+ +| | => | Work Ids | => | | => | Call Q | => | | +| | +----------+ | | +-----------+ | | +| | | ... | | | | ... | | | +| | | 6 | | | | 5, call() | | | +| | | 7 | | | | ... | | | +| Process | | ... | | Local | +-----------+ | Process | +| Pool | +----------+ | Worker | | #1..n | +| Executor | | Thread | | | +| | +----------- + | | +-----------+ | | +| | <=> | Work Items | <=> | | <= | Result Q | <= | | +| | +------------+ | | +-----------+ | | +| | | 6: call() | | | | ... | | | +| | | future | | | | 4, result | | | +| | | ... | | | | 3, except | | | ++----------+ +------------+ +--------+ +-----------+ +---------+ + +Executor.submit() called: +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict +- adds the id of the _WorkItem to the "Work Ids" queue + +Local worker thread: +- reads work ids from the "Work Ids" queue and looks up the corresponding + WorkItem from the "Work Items" dict: if the work item has been cancelled then + it is simply removed from the dict, otherwise it is repackaged as a + _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" + until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because + calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). +- reads _ResultItems from "Result Q", updates the future stored in the + "Work Items" dict and deletes the dict entry + +Process #1..n: +- reads _CallItems from "Call Q", executes the calls, and puts the resulting + _ResultItems in "Request Q" +""" + +import atexit +from concurrent.futures import _base +import Queue as queue +import multiprocessing +import threading +import weakref +import sys + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +# Workers are created as daemon threads and processes. This is done to allow the +# interpreter to exit when there are still idle processes in a +# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However, +# allowing workers to die with the interpreter has two undesirable properties: +# - The workers would still be running during interpretor shutdown, +# meaning that they would fail in unpredictable ways. +# - The workers could be killed while evaluating a work item, which could +# be bad if the callable being evaluated has external side-effects e.g. +# writing to a file. +# +# To work around this problem, an exit handler is installed which tells the +# workers to exit when their work queues are empty and then waits until the +# threads/processes finish. 
+ +_threads_queues = weakref.WeakKeyDictionary() +_shutdown = False + +def _python_exit(): + global _shutdown + _shutdown = True + items = list(_threads_queues.items()) if _threads_queues else () + for t, q in items: + q.put(None) + for t, q in items: + t.join(sys.maxint) + +# Controls how many more calls than processes will be queued in the call queue. +# A smaller number will mean that processes spend more time idle waiting for +# work while a larger number will make Future.cancel() succeed less frequently +# (Futures in the call queue cannot be cancelled). +EXTRA_QUEUED_CALLS = 1 + +class _WorkItem(object): + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + +class _ResultItem(object): + def __init__(self, work_id, exception=None, result=None): + self.work_id = work_id + self.exception = exception + self.result = result + +class _CallItem(object): + def __init__(self, work_id, fn, args, kwargs): + self.work_id = work_id + self.fn = fn + self.args = args + self.kwargs = kwargs + +def _process_worker(call_queue, result_queue): + """Evaluates calls from call_queue and places the results in result_queue. + + This worker is run in a separate process. + + Args: + call_queue: A multiprocessing.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A multiprocessing.Queue of _ResultItems that will be written + to by the worker. + shutdown: A multiprocessing.Event that will be set as a signal to the + worker that it should exit when call_queue is empty. + """ + while True: + call_item = call_queue.get(block=True) + if call_item is None: + # Wake up queue management thread + result_queue.put(None) + return + try: + r = call_item.fn(*call_item.args, **call_item.kwargs) + except: + e = sys.exc_info()[1] + result_queue.put(_ResultItem(call_item.work_id, + exception=e)) + else: + result_queue.put(_ResultItem(call_item.work_id, + result=r)) + +def _add_call_item_to_queue(pending_work_items, + work_ids, + call_queue): + """Fills call_queue with _WorkItems from pending_work_items. + + This function never blocks. + + Args: + pending_work_items: A dict mapping work ids to _WorkItems e.g. + {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids + are consumed and the corresponding _WorkItems from + pending_work_items are transformed into _CallItems and put in + call_queue. + call_queue: A multiprocessing.Queue that will be filled with _CallItems + derived from _WorkItems. + """ + while True: + if call_queue.full(): + return + try: + work_id = work_ids.get(block=False) + except queue.Empty: + return + else: + work_item = pending_work_items[work_id] + + if work_item.future.set_running_or_notify_cancel(): + call_queue.put(_CallItem(work_id, + work_item.fn, + work_item.args, + work_item.kwargs), + block=True) + else: + del pending_work_items[work_id] + continue + +def _queue_management_worker(executor_reference, + processes, + pending_work_items, + work_ids_queue, + call_queue, + result_queue): + """Manages the communication between this process and the worker processes. + + This function is run in a local thread. + + Args: + executor_reference: A weakref.ref to the ProcessPoolExecutor that owns + this thread. Used to determine if the ProcessPoolExecutor has been + garbage collected and that this function can exit. + processes: A list of the multiprocessing.Process instances used as + workers.
+ pending_work_items: A dict mapping work ids to _WorkItems e.g. + {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]). + call_queue: A multiprocessing.Queue that will be filled with _CallItems + derived from _WorkItems for processing by the process workers. + result_queue: A multiprocessing.Queue of _ResultItems generated by the + process workers. + """ + nb_shutdown_processes = [0] + def shutdown_one_process(): + """Tell a worker to terminate, which will in turn wake us again""" + call_queue.put(None) + nb_shutdown_processes[0] += 1 + while True: + _add_call_item_to_queue(pending_work_items, + work_ids_queue, + call_queue) + + result_item = result_queue.get(block=True) + if result_item is not None: + work_item = pending_work_items[result_item.work_id] + del pending_work_items[result_item.work_id] + + if result_item.exception: + work_item.future.set_exception(result_item.exception) + else: + work_item.future.set_result(result_item.result) + # Delete references to object. See issue16284 + del work_item + # Check whether we should start shutting down. + executor = executor_reference() + # No more work items can be added if: + # - The interpreter is shutting down OR + # - The executor that owns this worker has been collected OR + # - The executor that owns this worker has been shutdown. + if _shutdown or executor is None or executor._shutdown_thread: + # Since no new work items can be added, it is safe to shutdown + # this thread if there are no pending work items. + if not pending_work_items: + while nb_shutdown_processes[0] < len(processes): + shutdown_one_process() + # If .join() is not called on the created processes then + # some multiprocessing.Queue methods may deadlock on Mac OS + # X. + for p in processes: + p.join() + call_queue.close() + return + del executor + +_system_limits_checked = False +_system_limited = None +def _check_system_limits(): + global _system_limits_checked, _system_limited + if _system_limits_checked: + if _system_limited: + raise NotImplementedError(_system_limited) + _system_limits_checked = True + try: + import os + nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") + except (AttributeError, ValueError): + # sysconf not available or setting not available + return + if nsems_max == -1: + # indeterminate limit, assume that limit is determined + # by available memory only + return + if nsems_max >= 256: + # minimum number of semaphores available + # according to POSIX + return + _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max + raise NotImplementedError(_system_limited) + + +class ProcessPoolExecutor(_base.Executor): + def __init__(self, max_workers=None): + """Initializes a new ProcessPoolExecutor instance. + + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + """ + _check_system_limits() + + if max_workers is None: + self._max_workers = multiprocessing.cpu_count() + else: + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self._max_workers = max_workers + + # Make the call queue slightly larger than the number of processes to + # prevent the worker processes from idling. But don't make it too big + # because futures in the call queue cannot be cancelled.
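# For example (illustrative numbers): with max_workers=4 and the module-level
# EXTRA_QUEUED_CALLS of 1, the queue created below is multiprocessing.Queue(5),
# so at most one submitted call beyond the worker count is ever past the point
# where Future.cancel() can still succeed.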
+ self._call_queue = multiprocessing.Queue(self._max_workers + + EXTRA_QUEUED_CALLS) + self._result_queue = multiprocessing.Queue() + self._work_ids = queue.Queue() + self._queue_management_thread = None + self._processes = set() + + # Shutdown is a two-step process. + self._shutdown_thread = False + self._shutdown_lock = threading.Lock() + self._queue_count = 0 + self._pending_work_items = {} + + def _start_queue_management_thread(self): + # When the executor gets lost, the weakref callback will wake up + # the queue management thread. + def weakref_cb(_, q=self._result_queue): + q.put(None) + if self._queue_management_thread is None: + self._queue_management_thread = threading.Thread( + target=_queue_management_worker, + args=(weakref.ref(self, weakref_cb), + self._processes, + self._pending_work_items, + self._work_ids, + self._call_queue, + self._result_queue)) + self._queue_management_thread.daemon = True + self._queue_management_thread.start() + _threads_queues[self._queue_management_thread] = self._result_queue + + def _adjust_process_count(self): + for _ in range(len(self._processes), self._max_workers): + p = multiprocessing.Process( + target=_process_worker, + args=(self._call_queue, + self._result_queue)) + p.start() + self._processes.add(p) + + def submit(self, fn, *args, **kwargs): + with self._shutdown_lock: + if self._shutdown_thread: + raise RuntimeError('cannot schedule new futures after shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._pending_work_items[self._queue_count] = w + self._work_ids.put(self._queue_count) + self._queue_count += 1 + # Wake up queue management thread + self._result_queue.put(None) + + self._start_queue_management_thread() + self._adjust_process_count() + return f + submit.__doc__ = _base.Executor.submit.__doc__ + + def shutdown(self, wait=True): + with self._shutdown_lock: + self._shutdown_thread = True + if self._queue_management_thread: + # Wake up queue management thread + self._result_queue.put(None) + if wait: + self._queue_management_thread.join(sys.maxint) + # To reduce the risk of opening too many files, remove references to + # objects that use file descriptors. + self._queue_management_thread = None + self._call_queue = None + self._result_queue = None + self._processes = None + shutdown.__doc__ = _base.Executor.shutdown.__doc__ + +atexit.register(_python_exit) diff --git a/lib/concurrent/futures/thread.py b/lib/concurrent/futures/thread.py new file mode 100644 index 00000000..efae619a --- /dev/null +++ b/lib/concurrent/futures/thread.py @@ -0,0 +1,149 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ThreadPoolExecutor.""" + +import atexit +from concurrent.futures import _base +import Queue as queue +import threading +import weakref +import sys + +try: + from multiprocessing import cpu_count +except ImportError: + # some platforms don't have multiprocessing + def cpu_count(): + return None + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +# Workers are created as daemon threads. This is done to allow the interpreter +# to exit when there are still idle threads in a ThreadPoolExecutor's thread +# pool (i.e. shutdown() was not called). However, allowing workers to die with +# the interpreter has two undesirable properties: +# - The workers would still be running during interpreter shutdown, +# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could +# be bad if the callable being evaluated has external side-effects e.g. +# writing to a file. +# +# To work around this problem, an exit handler is installed which tells the +# workers to exit when their work queues are empty and then waits until the +# threads finish. + +_threads_queues = weakref.WeakKeyDictionary() +_shutdown = False + +def _python_exit(): + global _shutdown + _shutdown = True + items = list(_threads_queues.items()) if _threads_queues else () + for t, q in items: + q.put(None) + for t, q in items: + t.join(sys.maxint) + +atexit.register(_python_exit) + +class _WorkItem(object): + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + + def run(self): + if not self.future.set_running_or_notify_cancel(): + return + + try: + result = self.fn(*self.args, **self.kwargs) + except: + e, tb = sys.exc_info()[1:] + self.future.set_exception_info(e, tb) + else: + self.future.set_result(result) + +def _worker(executor_reference, work_queue): + try: + while True: + work_item = work_queue.get(block=True) + if work_item is not None: + work_item.run() + # Delete references to object. See issue16284 + del work_item + continue + executor = executor_reference() + # Exit if: + # - The interpreter is shutting down OR + # - The executor that owns the worker has been collected OR + # - The executor that owns the worker has been shutdown. + if _shutdown or executor is None or executor._shutdown: + # Notice other workers + work_queue.put(None) + return + del executor + except: + _base.LOGGER.critical('Exception in worker', exc_info=True) + + +class ThreadPoolExecutor(_base.Executor): + def __init__(self, max_workers=None): + """Initializes a new ThreadPoolExecutor instance. + + Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + """ + if max_workers is None: + # Use this number because ThreadPoolExecutor is often + # used to overlap I/O instead of CPU work. + max_workers = (cpu_count() or 1) * 5 + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self._max_workers = max_workers + self._work_queue = queue.Queue() + self._threads = set() + self._shutdown = False + self._shutdown_lock = threading.Lock() + + def submit(self, fn, *args, **kwargs): + with self._shutdown_lock: + if self._shutdown: + raise RuntimeError('cannot schedule new futures after shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._work_queue.put(w) + self._adjust_thread_count() + return f + submit.__doc__ = _base.Executor.submit.__doc__ + + def _adjust_thread_count(self): + # When the executor gets lost, the weakref callback will wake up + # the worker threads. + def weakref_cb(_, q=self._work_queue): + q.put(None) + # TODO(bquinlan): Should avoid creating new threads if there are more + # idle threads than items in the work queue. 
+ if len(self._threads) < self._max_workers: + t = threading.Thread(target=_worker, + args=(weakref.ref(self, weakref_cb), + self._work_queue)) + t.daemon = True + t.start() + self._threads.add(t) + _threads_queues[t] = self._work_queue + + def shutdown(self, wait=True): + with self._shutdown_lock: + self._shutdown = True + self._work_queue.put(None) + if wait: + for t in self._threads: + t.join(sys.maxint) + shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/lib/funcsigs/__init__.py b/lib/funcsigs/__init__.py new file mode 100644 index 00000000..5f5378b4 --- /dev/null +++ b/lib/funcsigs/__init__.py @@ -0,0 +1,829 @@ +# Copyright 2001-2013 Python Software Foundation; All Rights Reserved +"""Function signature objects for callables + +Back port of Python 3.3's function signature tools from the inspect module, +modified to be compatible with Python 2.6, 2.7 and 3.3+. +""" +from __future__ import absolute_import, division, print_function +import itertools +import functools +import re +import types + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + +from funcsigs.version import __version__ + +__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature'] + + +_WrapperDescriptor = type(type.__call__) +_MethodWrapper = type(all.__call__) + +_NonUserDefinedCallables = (_WrapperDescriptor, + _MethodWrapper, + types.BuiltinFunctionType) + + +def formatannotation(annotation, base_module=None): + if isinstance(annotation, type): + if annotation.__module__ in ('builtins', '__builtin__', base_module): + return annotation.__name__ + return annotation.__module__+'.'+annotation.__name__ + return repr(annotation) + + +def _get_user_defined_method(cls, method_name, *nested): + try: + if cls is type: + return + meth = getattr(cls, method_name) + for name in nested: + meth = getattr(meth, name, meth) + except AttributeError: + return + else: + if not isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return meth + + +def signature(obj): + '''Get a signature object for the passed callable.''' + + if not callable(obj): + raise TypeError('{0!r} is not a callable object'.format(obj)) + + if isinstance(obj, types.MethodType): + sig = signature(obj.__func__) + if obj.__self__ is None: + # Unbound method - preserve as-is. + return sig + else: + # Bound method. Eat self - if we can. + params = tuple(sig.parameters.values()) + + if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY): + raise ValueError('invalid method signature') + + kind = params[0].kind + if kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY): + # Drop first parameter: + # '(p1, p2[, ...])' -> '(p2[, ...])' + params = params[1:] + else: + if kind is not _VAR_POSITIONAL: + # Unless we add a new parameter type we never + # get here + raise ValueError('invalid argument type') + # It's a var-positional parameter. + # Do nothing. '(*args[, ...])' -> '(*args[, ...])' + + return sig.replace(parameters=params) + + try: + sig = obj.__signature__ + except AttributeError: + pass + else: + if sig is not None: + return sig + + try: + # Was this function wrapped by a decorator? 
+ wrapped = obj.__wrapped__ + except AttributeError: + pass + else: + return signature(wrapped) + + if isinstance(obj, types.FunctionType): + return Signature.from_function(obj) + + if isinstance(obj, functools.partial): + sig = signature(obj.func) + + new_params = OrderedDict(sig.parameters.items()) + + partial_args = obj.args or () + partial_keywords = obj.keywords or {} + try: + ba = sig.bind_partial(*partial_args, **partial_keywords) + except TypeError as ex: + msg = 'partial object {0!r} has incorrect arguments'.format(obj) + raise ValueError(msg) + + for arg_name, arg_value in ba.arguments.items(): + param = new_params[arg_name] + if arg_name in partial_keywords: + # We set a new default value, because the following code + # is correct: + # + # >>> def foo(a): print(a) + # >>> print(partial(partial(foo, a=10), a=20)()) + # 20 + # >>> print(partial(partial(foo, a=10), a=20)(a=30)) + # 30 + # + # So, with 'partial' objects, passing a keyword argument is + # like setting a new default value for the corresponding + # parameter + # + # We also mark this parameter with '_partial_kwarg' + # flag. Later, in '_bind', the 'default' value of this + # parameter will be added to 'kwargs', to simulate + # the 'functools.partial' real call. + new_params[arg_name] = param.replace(default=arg_value, + _partial_kwarg=True) + + elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and + not param._partial_kwarg): + new_params.pop(arg_name) + + return sig.replace(parameters=new_params.values()) + + sig = None + if isinstance(obj, type): + # obj is a class or a metaclass + + # First, let's see if it has an overloaded __call__ defined + # in its metaclass + call = _get_user_defined_method(type(obj), '__call__') + if call is not None: + sig = signature(call) + else: + # Now we check if the 'obj' class has a '__new__' method + new = _get_user_defined_method(obj, '__new__') + if new is not None: + sig = signature(new) + else: + # Finally, we should have at least __init__ implemented + init = _get_user_defined_method(obj, '__init__') + if init is not None: + sig = signature(init) + elif not isinstance(obj, _NonUserDefinedCallables): + # An object with __call__ + # We also check that the 'obj' is not an instance of + # _WrapperDescriptor or _MethodWrapper to avoid + # infinite recursion (and even potential segfault) + call = _get_user_defined_method(type(obj), '__call__', 'im_func') + if call is not None: + sig = signature(call) + + if sig is not None: + # For classes and objects we skip the first parameter of their + # __call__, __new__, or __init__ methods + return sig.replace(parameters=tuple(sig.parameters.values())[1:]) + + if isinstance(obj, types.BuiltinFunctionType): + # Raise a nicer error message for builtins + msg = 'no signature found for builtin function {0!r}'.format(obj) + raise ValueError(msg) + + raise ValueError('callable {0!r} is not supported by signature'.format(obj)) + + +class _void(object): + '''A private marker - used in Parameter & Signature''' + + +class _empty(object): + pass + + +class _ParameterKind(int): + def __new__(self, *args, **kwargs): + obj = int.__new__(self, *args) + obj._name = kwargs['name'] + return obj + + def __str__(self): + return self._name + + def __repr__(self): + return '<_ParameterKind: {0!r}>'.format(self._name) + + +_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY') +_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD') +_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL') +_KEYWORD_ONLY = _ParameterKind(3, 
name='KEYWORD_ONLY') +_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD') + + +class Parameter(object): + '''Represents a parameter in a function signature. + + Has the following public attributes: + + * name : str + The name of the parameter as a string. + * default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is not set. + * annotation + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is not set. + * kind : str + Describes how argument values are bound to the parameter. + Possible values: `Parameter.POSITIONAL_ONLY`, + `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, + `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. + ''' + + __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg') + + POSITIONAL_ONLY = _POSITIONAL_ONLY + POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD + VAR_POSITIONAL = _VAR_POSITIONAL + KEYWORD_ONLY = _KEYWORD_ONLY + VAR_KEYWORD = _VAR_KEYWORD + + empty = _empty + + def __init__(self, name, kind, default=_empty, annotation=_empty, + _partial_kwarg=False): + + if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD, + _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD): + raise ValueError("invalid value for 'Parameter.kind' attribute") + self._kind = kind + + if default is not _empty: + if kind in (_VAR_POSITIONAL, _VAR_KEYWORD): + msg = '{0} parameters cannot have default values'.format(kind) + raise ValueError(msg) + self._default = default + self._annotation = annotation + + if name is None: + if kind != _POSITIONAL_ONLY: + raise ValueError("None is not a valid name for a " + "non-positional-only parameter") + self._name = name + else: + name = str(name) + if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I): + msg = '{0!r} is not a valid parameter name'.format(name) + raise ValueError(msg) + self._name = name + + self._partial_kwarg = _partial_kwarg + + @property + def name(self): + return self._name + + @property + def default(self): + return self._default + + @property + def annotation(self): + return self._annotation + + @property + def kind(self): + return self._kind + + def replace(self, name=_void, kind=_void, annotation=_void, + default=_void, _partial_kwarg=_void): + '''Creates a customized copy of the Parameter.''' + + if name is _void: + name = self._name + + if kind is _void: + kind = self._kind + + if annotation is _void: + annotation = self._annotation + + if default is _void: + default = self._default + + if _partial_kwarg is _void: + _partial_kwarg = self._partial_kwarg + + return type(self)(name, kind, default=default, annotation=annotation, + _partial_kwarg=_partial_kwarg) + + def __str__(self): + kind = self.kind + + formatted = self._name + if kind == _POSITIONAL_ONLY: + if formatted is None: + formatted = '' + formatted = '<{0}>'.format(formatted) + + # Add annotation and default value + if self._annotation is not _empty: + formatted = '{0}:{1}'.format(formatted, + formatannotation(self._annotation)) + + if self._default is not _empty: + formatted = '{0}={1}'.format(formatted, repr(self._default)) + + if kind == _VAR_POSITIONAL: + formatted = '*' + formatted + elif kind == _VAR_KEYWORD: + formatted = '**' + formatted + + return formatted + + def __repr__(self): + return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__, + id(self), self.name) + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def 
__eq__(self, other): + return (issubclass(other.__class__, Parameter) and + self._name == other._name and + self._kind == other._kind and + self._default == other._default and + self._annotation == other._annotation) + + def __ne__(self, other): + return not self.__eq__(other) + + +class BoundArguments(object): + '''Result of `Signature.bind` call. Holds the mapping of arguments + to the function's parameters. + + Has the following public attributes: + + * arguments : OrderedDict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. + * signature : Signature + The Signature object that created this instance. + * args : tuple + Tuple of positional arguments values. + * kwargs : dict + Dict of keyword arguments values. + ''' + + def __init__(self, signature, arguments): + self.arguments = arguments + self._signature = signature + + @property + def signature(self): + return self._signature + + @property + def args(self): + args = [] + for param_name, param in self._signature.parameters.items(): + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + # Keyword arguments mapped by 'functools.partial' + # (Parameter._partial_kwarg is True) are mapped + # in 'BoundArguments.kwargs', along with VAR_KEYWORD & + # KEYWORD_ONLY + break + + try: + arg = self.arguments[param_name] + except KeyError: + # We're done here. Other arguments + # will be mapped in 'BoundArguments.kwargs' + break + else: + if param.kind == _VAR_POSITIONAL: + # *args + args.extend(arg) + else: + # plain argument + args.append(arg) + + return tuple(args) + + @property + def kwargs(self): + kwargs = {} + kwargs_started = False + for param_name, param in self._signature.parameters.items(): + if not kwargs_started: + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + kwargs_started = True + else: + if param_name not in self.arguments: + kwargs_started = True + continue + + if not kwargs_started: + continue + + try: + arg = self.arguments[param_name] + except KeyError: + pass + else: + if param.kind == _VAR_KEYWORD: + # **kwargs + kwargs.update(arg) + else: + # plain keyword argument + kwargs[param_name] = arg + + return kwargs + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def __eq__(self, other): + return (issubclass(other.__class__, BoundArguments) and + self.signature == other.signature and + self.arguments == other.arguments) + + def __ne__(self, other): + return not self.__eq__(other) + + +class Signature(object): + '''A Signature object represents the overall signature of a function. + It stores a Parameter object for each parameter accepted by the + function, as well as information specific to the function itself. + + A Signature object has the following public attributes and methods: + + * parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in `code.co_varnames`). + * return_annotation : object + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is not set. + * bind(*args, **kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. 
+ * bind_partial(*args, **kwargs) -> BoundArguments + Creates a partial mapping from positional and keyword arguments + to parameters (simulating 'functools.partial' behavior.) + ''' + + __slots__ = ('_return_annotation', '_parameters') + + _parameter_cls = Parameter + _bound_arguments_cls = BoundArguments + + empty = _empty + + def __init__(self, parameters=None, return_annotation=_empty, + __validate_parameters__=True): + '''Constructs Signature from the given list of Parameter + objects and 'return_annotation'. All arguments are optional. + ''' + + if parameters is None: + params = OrderedDict() + else: + if __validate_parameters__: + params = OrderedDict() + top_kind = _POSITIONAL_ONLY + + for idx, param in enumerate(parameters): + kind = param.kind + if kind < top_kind: + msg = 'wrong parameter order: {0} before {1}' + msg = msg.format(top_kind, param.kind) + raise ValueError(msg) + else: + top_kind = kind + + name = param.name + if name is None: + name = str(idx) + param = param.replace(name=name) + + if name in params: + msg = 'duplicate parameter name: {0!r}'.format(name) + raise ValueError(msg) + params[name] = param + else: + params = OrderedDict(((param.name, param) + for param in parameters)) + + self._parameters = params + self._return_annotation = return_annotation + + @classmethod + def from_function(cls, func): + '''Constructs Signature for the given python function''' + + if not isinstance(func, types.FunctionType): + raise TypeError('{0!r} is not a Python function'.format(func)) + + Parameter = cls._parameter_cls + + # Parameter information. + func_code = func.__code__ + pos_count = func_code.co_argcount + arg_names = func_code.co_varnames + positional = tuple(arg_names[:pos_count]) + keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0) + keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] + annotations = getattr(func, '__annotations__', {}) + defaults = func.__defaults__ + kwdefaults = getattr(func, '__kwdefaults__', None) + + if defaults: + pos_default_count = len(defaults) + else: + pos_default_count = 0 + + parameters = [] + + # Non-keyword-only parameters w/o defaults. + non_default_count = pos_count - pos_default_count + for name in positional[:non_default_count]: + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD)) + + # ... w/ defaults. + for offset, name in enumerate(positional[non_default_count:]): + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD, + default=defaults[offset])) + + # *args + if func_code.co_flags & 0x04: + name = arg_names[pos_count + keyword_only_count] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_POSITIONAL)) + + # Keyword-only parameters. 
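+        # (Keyword-only parameters exist only on Python 3; on Python 2
+        # func_code has no co_kwonlyargcount, so keyword_only is empty
+        # and this loop body never runs.)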
+ for name in keyword_only: + default = _empty + if kwdefaults is not None: + default = kwdefaults.get(name, _empty) + + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_KEYWORD_ONLY, + default=default)) + # **kwargs + if func_code.co_flags & 0x08: + index = pos_count + keyword_only_count + if func_code.co_flags & 0x04: + index += 1 + + name = arg_names[index] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_KEYWORD)) + + return cls(parameters, + return_annotation=annotations.get('return', _empty), + __validate_parameters__=False) + + @property + def parameters(self): + try: + return types.MappingProxyType(self._parameters) + except AttributeError: + return OrderedDict(self._parameters.items()) + + @property + def return_annotation(self): + return self._return_annotation + + def replace(self, parameters=_void, return_annotation=_void): + '''Creates a customized copy of the Signature. + Pass 'parameters' and/or 'return_annotation' arguments + to override them in the new copy. + ''' + + if parameters is _void: + parameters = self.parameters.values() + + if return_annotation is _void: + return_annotation = self._return_annotation + + return type(self)(parameters, + return_annotation=return_annotation) + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def __eq__(self, other): + if (not issubclass(type(other), Signature) or + self.return_annotation != other.return_annotation or + len(self.parameters) != len(other.parameters)): + return False + + other_positions = dict((param, idx) + for idx, param in enumerate(other.parameters.keys())) + + for idx, (param_name, param) in enumerate(self.parameters.items()): + if param.kind == _KEYWORD_ONLY: + try: + other_param = other.parameters[param_name] + except KeyError: + return False + else: + if param != other_param: + return False + else: + try: + other_idx = other_positions[param_name] + except KeyError: + return False + else: + if (idx != other_idx or + param != other.parameters[param_name]): + return False + + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def _bind(self, args, kwargs, partial=False): + '''Private method. Don't use directly.''' + + arguments = OrderedDict() + + parameters = iter(self.parameters.values()) + parameters_ex = () + arg_vals = iter(args) + + if partial: + # Support for binding arguments to 'functools.partial' objects. + # See 'functools.partial' case in 'signature()' implementation + # for details. + for param_name, param in self.parameters.items(): + if (param._partial_kwarg and param_name not in kwargs): + # Simulating 'functools.partial' behavior + kwargs[param_name] = param.default + + while True: + # Let's iterate through the positional arguments and corresponding + # parameters + try: + arg_val = next(arg_vals) + except StopIteration: + # No more positional arguments + try: + param = next(parameters) + except StopIteration: + # No more parameters. That's it. Just need to check that + # we have no `kwargs` after this while loop + break + else: + if param.kind == _VAR_POSITIONAL: + # That's OK, just empty *args. 
Let's start parsing + # kwargs + break + elif param.name in kwargs: + if param.kind == _POSITIONAL_ONLY: + msg = '{arg!r} parameter is positional only, ' \ + 'but was passed as a keyword' + msg = msg.format(arg=param.name) + raise TypeError(msg) + parameters_ex = (param,) + break + elif (param.kind == _VAR_KEYWORD or + param.default is not _empty): + # That's fine too - we have a default value for this + # parameter. So, lets start parsing `kwargs`, starting + # with the current parameter + parameters_ex = (param,) + break + else: + if partial: + parameters_ex = (param,) + break + else: + msg = '{arg!r} parameter lacking default value' + msg = msg.format(arg=param.name) + raise TypeError(msg) + else: + # We have a positional argument to process + try: + param = next(parameters) + except StopIteration: + raise TypeError('too many positional arguments') + else: + if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY): + # Looks like we have no parameter for this positional + # argument + raise TypeError('too many positional arguments') + + if param.kind == _VAR_POSITIONAL: + # We have an '*args'-like argument, let's fill it with + # all positional arguments we have left and move on to + # the next phase + values = [arg_val] + values.extend(arg_vals) + arguments[param.name] = tuple(values) + break + + if param.name in kwargs: + raise TypeError('multiple values for argument ' + '{arg!r}'.format(arg=param.name)) + + arguments[param.name] = arg_val + + # Now, we iterate through the remaining parameters to process + # keyword arguments + kwargs_param = None + for param in itertools.chain(parameters_ex, parameters): + if param.kind == _POSITIONAL_ONLY: + # This should never happen in case of a properly built + # Signature object (but let's have this check here + # to ensure correct behaviour just in case) + raise TypeError('{arg!r} parameter is positional only, ' + 'but was passed as a keyword'. \ + format(arg=param.name)) + + if param.kind == _VAR_KEYWORD: + # Memorize that we have a '**kwargs'-like parameter + kwargs_param = param + continue + + param_name = param.name + try: + arg_val = kwargs.pop(param_name) + except KeyError: + # We have no value for this parameter. It's fine though, + # if it has a default value, or it is an '*args'-like + # parameter, left alone by the processing of positional + # arguments. + if (not partial and param.kind != _VAR_POSITIONAL and + param.default is _empty): + raise TypeError('{arg!r} parameter lacking default value'. \ + format(arg=param_name)) + + else: + arguments[param_name] = arg_val + + if kwargs: + if kwargs_param is not None: + # Process our '**kwargs'-like parameter + arguments[kwargs_param.name] = kwargs + else: + raise TypeError('too many keyword arguments %r' % kwargs) + + return self._bound_arguments_cls(self, arguments) + + def bind(*args, **kwargs): + '''Get a BoundArguments object, that maps the passed `args` + and `kwargs` to the function's signature. Raises `TypeError` + if the passed arguments can not be bound. + ''' + return args[0]._bind(args[1:], kwargs) + + def bind_partial(self, *args, **kwargs): + '''Get a BoundArguments object, that partially maps the + passed `args` and `kwargs` to the function's signature. + Raises `TypeError` if the passed arguments can not be bound. 
+ ''' + return self._bind(args, kwargs, partial=True) + + def __str__(self): + result = [] + render_kw_only_separator = True + for idx, param in enumerate(self.parameters.values()): + formatted = str(param) + + kind = param.kind + if kind == _VAR_POSITIONAL: + # OK, we have an '*args'-like parameter, so we won't need + # a '*' to separate keyword-only arguments + render_kw_only_separator = False + elif kind == _KEYWORD_ONLY and render_kw_only_separator: + # We have a keyword-only parameter to render and we haven't + # rendered an '*args'-like parameter before, so add a '*' + # separator to the parameters list ("foo(arg1, *, arg2)" case) + result.append('*') + # This condition should be only triggered once, so + # reset the flag + render_kw_only_separator = False + + result.append(formatted) + + rendered = '({0})'.format(', '.join(result)) + + if self.return_annotation is not _empty: + anno = formatannotation(self.return_annotation) + rendered += ' -> {0}'.format(anno) + + return rendered diff --git a/lib/funcsigs/version.py b/lib/funcsigs/version.py new file mode 100644 index 00000000..7863915f --- /dev/null +++ b/lib/funcsigs/version.py @@ -0,0 +1 @@ +__version__ = "1.0.2" diff --git a/lib/pytz/LICENSE.txt b/lib/pytz/LICENSE.txt new file mode 100644 index 00000000..5e12fcca --- /dev/null +++ b/lib/pytz/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2003-2009 Stuart Bishop + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/lib/pytz/README.txt b/lib/pytz/README.txt new file mode 100644 index 00000000..8b216960 --- /dev/null +++ b/lib/pytz/README.txt @@ -0,0 +1,575 @@ +pytz - World Timezone Definitions for Python +============================================ + +:Author: Stuart Bishop + +Introduction +~~~~~~~~~~~~ + +pytz brings the Olson tz database into Python. This library allows +accurate and cross platform timezone calculations using Python 2.4 +or higher. It also solves the issue of ambiguous times at the end +of daylight saving time, which you can read more about in the Python +Library Reference (``datetime.tzinfo``). + +Almost all of the Olson timezones are supported. + +.. note:: + + This library differs from the documented Python API for + tzinfo implementations; if you want to create local wallclock + times you need to use the ``localize()`` method documented in this + document. In addition, if you perform date arithmetic on local + times that cross DST boundaries, the result may be in an incorrect + timezone (ie. 
subtract 1 minute from 2002-10-27 1:00 EST and you get + 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A + ``normalize()`` method is provided to correct this. Unfortunately these + issues cannot be resolved without modifying the Python datetime + implementation (see PEP-431). + + +Installation +~~~~~~~~~~~~ + +This package can either be installed from a .egg file using setuptools, +or from the tarball using the standard Python distutils. + +If you are installing from a tarball, run the following command as an +administrative user:: + + python setup.py install + +If you are installing using setuptools, you don't even need to download +anything as the latest version will be downloaded for you +from the Python package index:: + + easy_install --upgrade pytz + +If you already have the .egg file, you can use that too:: + + easy_install pytz-2008g-py2.6.egg + + +Example & Usage +~~~~~~~~~~~~~~~ + +Localized times and date arithmetic +----------------------------------- + +>>> from datetime import datetime, timedelta +>>> from pytz import timezone +>>> import pytz +>>> utc = pytz.utc +>>> utc.zone +'UTC' +>>> eastern = timezone('US/Eastern') +>>> eastern.zone +'US/Eastern' +>>> amsterdam = timezone('Europe/Amsterdam') +>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z' + +This library only supports two ways of building a localized time. The +first is to use the ``localize()`` method provided by the pytz library. +This is used to localize a naive datetime (datetime with no timezone +information): + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0)) +>>> print(loc_dt.strftime(fmt)) +2002-10-27 06:00:00 EST-0500 + +The second way of building a localized time is by converting an existing +localized time using the standard ``astimezone()`` method: + +>>> ams_dt = loc_dt.astimezone(amsterdam) +>>> ams_dt.strftime(fmt) +'2002-10-27 12:00:00 CET+0100' + +Unfortunately using the tzinfo argument of the standard datetime +constructors ''does not work'' with pytz for many timezones. + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt) +'2002-10-27 12:00:00 LMT+0020' + +It is safe for timezones without daylight saving transitions though, such +as UTC: + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt) +'2002-10-27 12:00:00 UTC+0000' + +The preferred way of dealing with times is to always work in UTC, +converting to localtime only when generating output to be read +by humans. + +>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) +>>> loc_dt = utc_dt.astimezone(eastern) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:00:00 EST-0500' + +This library also allows you to do date arithmetic using local +times, although it is more complicated than working in UTC as you +need to use the ``normalize()`` method to handle daylight saving time +and other timezone transitions. In this example, ``loc_dt`` is set +to the instant when daylight saving time ends in the US/Eastern +timezone. + +>>> before = loc_dt - timedelta(minutes=10) +>>> before.strftime(fmt) +'2002-10-27 00:50:00 EST-0500' +>>> eastern.normalize(before).strftime(fmt) +'2002-10-27 01:50:00 EDT-0400' +>>> after = eastern.normalize(before + timedelta(minutes=20)) +>>> after.strftime(fmt) +'2002-10-27 01:10:00 EST-0500' + +Creating local times is also tricky, and the reason why working with +local times is not recommended. 
Unfortunately, you cannot just pass
+a ``tzinfo`` argument when constructing a datetime (see the next
+section for more details)
+
+>>> dt = datetime(2002, 10, 27, 1, 30, 0)
+>>> dt1 = eastern.localize(dt, is_dst=True)
+>>> dt1.strftime(fmt)
+'2002-10-27 01:30:00 EDT-0400'
+>>> dt2 = eastern.localize(dt, is_dst=False)
+>>> dt2.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+Converting between timezones also needs special attention. We also need
+to use the ``normalize()`` method to ensure the conversion is correct.
+
+>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899))
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz))
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = utc.normalize(au_dt.astimezone(utc))
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+
+You can take shortcuts when dealing with the UTC side of timezone
+conversions. ``normalize()`` and ``localize()`` are not really
+necessary when there are no daylight saving time transitions to
+deal with.
+
+>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc)
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz))
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = au_dt.astimezone(utc)
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+
+
+``tzinfo`` API
+--------------
+
+The ``tzinfo`` instances returned by the ``timezone()`` function have
+been extended to cope with ambiguous times by adding an ``is_dst``
+parameter to the ``utcoffset()``, ``dst()`` and ``tzname()`` methods.
+
+>>> tz = timezone('America/St_Johns')
+
+>>> normal = datetime(2009, 9, 1)
+>>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+The ``is_dst`` parameter is ignored for most timestamps. It is only used
+during DST transition ambiguous periods to resolve that ambiguity.
+
+>>> tz.utcoffset(normal, is_dst=True)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal, is_dst=True)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal, is_dst=True)
+'NDT'
+
+>>> tz.utcoffset(ambiguous, is_dst=True)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(ambiguous, is_dst=True)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(ambiguous, is_dst=True)
+'NDT'
+
+>>> tz.utcoffset(normal, is_dst=False)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal, is_dst=False)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal, is_dst=False)
+'NDT'
+
+>>> tz.utcoffset(ambiguous, is_dst=False)
+datetime.timedelta(-1, 73800)
+>>> tz.dst(ambiguous, is_dst=False)
+datetime.timedelta(0)
+>>> tz.tzname(ambiguous, is_dst=False)
+'NST'
+
+If ``is_dst`` is not specified, ambiguous timestamps will raise
+a ``pytz.exceptions.AmbiguousTimeError`` exception.
+
+>>> tz.utcoffset(normal)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal)
+'NDT'
+
+>>> import pytz.exceptions
+>>> try:
+...     tz.utcoffset(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+...     tz.dst(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+...     tz.tzname(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
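+
+As a minimal sketch (the ``utcoffset_with_guess`` helper below is our
+own, not part of pytz), calling code can catch this exception and fall
+back to an explicit ``is_dst`` guess::
+
+    import pytz.exceptions
+
+    def utcoffset_with_guess(tz, naive_dt, assume_dst=True):
+        # For most timestamps the is_dst hint is unnecessary and this
+        # first call succeeds directly.
+        try:
+            return tz.utcoffset(naive_dt)
+        except pytz.exceptions.AmbiguousTimeError:
+            # The wall clock time occurred twice; resolve it with an
+            # explicit guess instead of propagating the error.
+            return tz.utcoffset(naive_dt, is_dst=assume_dst)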
+
+
+Problems with Localtime
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The major problem we have to deal with is that certain datetimes
+may occur twice in a year. For example, in the US/Eastern timezone
+on the last Sunday morning in October, the following sequence
+happens:
+
+    - 01:00 EDT occurs
+    - 1 hour later, instead of 2:00am the clock is turned back 1 hour
+      and 01:00 happens again (this time 01:00 EST)
+
+In fact, every instant between 01:00 and 02:00 occurs twice. This means
+that if you try and create a time in the 'US/Eastern' timezone using
+the standard datetime syntax, there is no way to specify if you meant
+before or after the end-of-daylight-saving-time transition. Using the
+pytz custom syntax, the best you can do is make an educated guess:
+
+>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00))
+>>> loc_dt.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+As you can see, the system has chosen one for you and there is a 50%
+chance of it being out by one hour. For some applications, this does
+not matter. However, if you are trying to schedule meetings with people
+in different timezones or analyze log files it is not acceptable.
+
+The best and simplest solution is to stick with using UTC. The pytz
+package encourages using UTC for internal timezone representation by
+including a special UTC implementation based on the standard Python
+reference implementation in the Python documentation.
+
+The UTC timezone unpickles to be the same instance, and pickles to a
+smaller size than other pytz tzinfo instances. The UTC implementation
+can be obtained as pytz.utc, pytz.UTC, or pytz.timezone('UTC').
+
+>>> import pickle, pytz
+>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
+>>> naive = dt.replace(tzinfo=None)
+>>> p = pickle.dumps(dt, 1)
+>>> naive_p = pickle.dumps(naive, 1)
+>>> len(p) - len(naive_p)
+17
+>>> new = pickle.loads(p)
+>>> new == dt
+True
+>>> new is dt
+False
+>>> new.tzinfo is dt.tzinfo
+True
+>>> pytz.utc is pytz.UTC is pytz.timezone('UTC')
+True
+
+Note that some other timezones are commonly thought of as the same (GMT,
+Greenwich, Universal, etc.). The definition of UTC is distinct from these
+other timezones, and they are not equivalent. For this reason, they will
+not compare the same in Python.
+
+>>> utc == pytz.timezone('GMT')
+False
+
+See the section `What is UTC`_, below.
+
+If you insist on working with local times, this library provides a
+facility for constructing them unambiguously:
+
+>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> est_dt = eastern.localize(loc_dt, is_dst=True)
+>>> edt_dt = eastern.localize(loc_dt, is_dst=False)
+>>> print(est_dt.strftime(fmt) + ' / ' + edt_dt.strftime(fmt))
+2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500
+
+If you pass None as the is_dst flag to localize(), pytz will refuse to
+guess and raise exceptions if you try to build ambiguous or non-existent
+times.
+
+For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern
+timezone when the clocks were put back at the end of Daylight Saving
+Time:
+
+>>> dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> try:
+...     eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % dt)
+pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00
+
+Similarly, 2:30am on 7th April 2002 never happened at all in the
+US/Eastern timezone, as the clocks were put forward at 2:00am skipping
+the entire hour:
+
+>>> dt = datetime(2002, 4, 7, 2, 30, 00)
+>>> try:
+...     eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.NonExistentTimeError:
+...     print('pytz.exceptions.NonExistentTimeError: %s' % dt)
+pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00
+
+Both of these exceptions share a common base class to make error handling
+easier:
+
+>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError)
+True
+>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError)
+True
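+
+A minimal sketch of using that base class (the ``localize_strict``
+helper below is our own, not part of pytz)::
+
+    import pytz
+
+    def localize_strict(tz, naive_dt):
+        # is_dst=None makes pytz raise instead of guessing; the single
+        # InvalidTimeError handler covers both failure modes above.
+        try:
+            return tz.localize(naive_dt, is_dst=None)
+        except pytz.InvalidTimeError:
+            raise ValueError('%s is not a valid wall clock time in %s'
+                             % (naive_dt, tz.zone))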
+
+
+A special case is where countries change their timezone definitions
+with no daylight savings time switch. For example, in 1915 Warsaw
+switched from Warsaw time to Central European time with no daylight savings
+transition. So at the stroke of midnight on August 5th 1915 the clocks
+were wound back 24 minutes creating an ambiguous time period that cannot
+be specified without referring to the timezone abbreviation or the
+actual UTC offset. In this case midnight happened twice, neither time
+during a daylight saving time period. pytz handles this transition by
+treating the ambiguous period before the switch as daylight savings
+time, and the ambiguous period after as standard time.
+
+
+>>> warsaw = pytz.timezone('Europe/Warsaw')
+>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True)
+>>> amb_dt1.strftime(fmt)
+'1915-08-04 23:59:59 WMT+0124'
+>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False)
+>>> amb_dt2.strftime(fmt)
+'1915-08-04 23:59:59 CET+0100'
+>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False)
+>>> switch_dt.strftime(fmt)
+'1915-08-05 00:00:00 CET+0100'
+>>> str(switch_dt - amb_dt1)
+'0:24:01'
+>>> str(switch_dt - amb_dt2)
+'0:00:01'
+
+The best way of creating a time during an ambiguous time period is
+by converting from another timezone such as UTC:
+
+>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc)
+>>> utc_dt.astimezone(warsaw).strftime(fmt)
+'1915-08-04 23:36:00 CET+0100'
+
+The standard Python way of handling all these ambiguities is not to
+handle them, as demonstrated in this example using the US/Eastern
+timezone definition from the Python documentation (Note that this
+implementation only works for dates between 1987 and 2006 - it is
+included for tests only!):
+
+>>> from pytz.reference import Eastern # pytz.reference only for tests
+>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern)
+>>> str(dt)
+'2002-10-27 00:30:00-04:00'
+>>> str(dt + timedelta(hours=1))
+'2002-10-27 01:30:00-05:00'
+>>> str(dt + timedelta(hours=2))
+'2002-10-27 02:30:00-05:00'
+>>> str(dt + timedelta(hours=3))
+'2002-10-27 03:30:00-05:00'
+
+Notice the first two results? At first glance you might think they are
+correct, but taking the UTC offset into account you find that they are
+actually two hours apart instead of the 1 hour we asked for.
+
+>>> from pytz.reference import UTC # pytz.reference only for tests
+>>> str(dt.astimezone(UTC))
+'2002-10-27 04:30:00+00:00'
+>>> str((dt + timedelta(hours=1)).astimezone(UTC))
+'2002-10-27 06:30:00+00:00'
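+
+For comparison, a sketch of the same arithmetic done the pytz way with
+``normalize()``, as described earlier (variable names are ours)::
+
+    from datetime import datetime, timedelta
+    import pytz
+
+    eastern = pytz.timezone('US/Eastern')
+    dt = eastern.localize(datetime(2002, 10, 27, 0, 30), is_dst=True)
+    # normalize() repairs the tzinfo after crossing the DST boundary,
+    # so adding one hour advances the actual instant by one hour.
+    later = eastern.normalize(dt + timedelta(hours=1))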
+
+
+Country Information
+~~~~~~~~~~~~~~~~~~~
+
+A mechanism is provided to access the timezones commonly in use
+for a particular country, looked up using the ISO 3166 country code.
+It returns a list of strings that can be used to retrieve the relevant
+tzinfo instance using ``pytz.timezone()``:
+
+>>> print(' '.join(pytz.country_timezones['nz']))
+Pacific/Auckland Pacific/Chatham
+
+The Olson database comes with an ISO 3166 country code to English country
+name mapping that pytz exposes as a dictionary:
+
+>>> print(pytz.country_names['nz'])
+New Zealand
+
+
+What is UTC
+~~~~~~~~~~~
+
+'UTC' is `Coordinated Universal Time`_. It is a successor to, but distinct
+from, Greenwich Mean Time (GMT) and the various definitions of Universal
+Time. UTC is now the worldwide standard for regulating clocks and time
+measurement.
+
+All other timezones are defined relative to UTC, and include offsets like
+UTC+0800 - hours to add or subtract from UTC to derive the local time. No
+daylight saving time occurs in UTC, making it a useful timezone to perform
+date arithmetic without worrying about the confusion and ambiguities caused
+by daylight saving time transitions, your country changing its timezone, or
+mobile computers that roam through multiple timezones.
+
+.. _Coordinated Universal Time: https://en.wikipedia.org/wiki/Coordinated_Universal_Time
+
+
+Helpers
+~~~~~~~
+
+There are two lists of timezones provided.
+
+``all_timezones`` is the exhaustive list of the timezone names that can
+be used.
+
+>>> from pytz import all_timezones
+>>> len(all_timezones) >= 500
+True
+>>> 'Etc/Greenwich' in all_timezones
+True
+
+``common_timezones`` is a list of useful, current timezones. It doesn't
+contain deprecated zones or historical zones, except for a few I've
+deemed in common usage, such as US/Eastern (open a bug report if you
+think other timezones are deserving of being included here). It is also
+a sequence of strings.
+
+>>> from pytz import common_timezones
+>>> len(common_timezones) < len(all_timezones)
+True
+>>> 'Etc/Greenwich' in common_timezones
+False
+>>> 'Australia/Melbourne' in common_timezones
+True
+>>> 'US/Eastern' in common_timezones
+True
+>>> 'Canada/Eastern' in common_timezones
+True
+>>> 'US/Pacific-New' in all_timezones
+True
+>>> 'US/Pacific-New' in common_timezones
+False
+
+Both ``common_timezones`` and ``all_timezones`` are alphabetically
+sorted:
+
+>>> common_timezones_dupe = common_timezones[:]
+>>> common_timezones_dupe.sort()
+>>> common_timezones == common_timezones_dupe
+True
+>>> all_timezones_dupe = all_timezones[:]
+>>> all_timezones_dupe.sort()
+>>> all_timezones == all_timezones_dupe
+True
+
+``all_timezones`` and ``common_timezones`` are also available as sets.
+
+>>> from pytz import all_timezones_set, common_timezones_set
+>>> 'US/Eastern' in all_timezones_set
+True
+>>> 'US/Eastern' in common_timezones_set
+True
+>>> 'Australia/Victoria' in common_timezones_set
+False
+
+You can also retrieve lists of timezones used by particular countries
+using the ``country_timezones()`` function. It requires an ISO-3166
+two letter country code.
+
+>>> from pytz import country_timezones
+>>> print(' '.join(country_timezones('ch')))
+Europe/Zurich
+>>> print(' '.join(country_timezones('CH')))
+Europe/Zurich
+
+
+License
+~~~~~~~
+
+MIT license.
+
+This code is also available as part of Zope 3 under the Zope Public
+License, Version 2.1 (ZPL).
+
+I'm happy to relicense this code if necessary for inclusion in other
+open source projects.
+
+
+Latest Versions
+~~~~~~~~~~~~~~~
+
+This package will be updated after releases of the Olson timezone
+database. The latest version can be downloaded from the `Python Package
+Index `_.
The code that is used +to generate this distribution is hosted on launchpad.net and available +using the `Bazaar version control system `_ +using:: + + bzr branch lp:pytz + +Announcements of new releases are made on +`Launchpad `_, and the +`Atom feed `_ +hosted there. + + +Bugs, Feature Requests & Patches +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Bugs can be reported using `Launchpad `_. + + +Issues & Limitations +~~~~~~~~~~~~~~~~~~~~ + +- Offsets from UTC are rounded to the nearest whole minute, so timezones + such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This + is a limitation of the Python datetime library. + +- If you think a timezone definition is incorrect, I probably can't fix + it. pytz is a direct translation of the Olson timezone database, and + changes to the timezone definitions need to be made to this source. + If you find errors they should be reported to the time zone mailing + list, linked from http://www.iana.org/time-zones. + + +Further Reading +~~~~~~~~~~~~~~~ + +More info than you want to know about timezones: +http://www.twinsun.com/tz/tz-link.htm + + +Contact +~~~~~~~ + +Stuart Bishop + + diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py new file mode 100644 index 00000000..e5cbe56d --- /dev/null +++ b/lib/pytz/__init__.py @@ -0,0 +1,1513 @@ +''' +datetime.tzinfo timezone definitions generated from the +Olson timezone database: + + ftp://elsie.nci.nih.gov/pub/tz*.tar.gz + +See the datetime section of the Python Library Reference for information +on how to use these modules. +''' + +# The Olson database is updated several times a year. +OLSON_VERSION = '2014j' +VERSION = '2014.10' # Switching to pip compatible version numbering. +__version__ = VERSION + +OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling + +__all__ = [ + 'timezone', 'utc', 'country_timezones', 'country_names', + 'AmbiguousTimeError', 'InvalidTimeError', + 'NonExistentTimeError', 'UnknownTimeZoneError', + 'all_timezones', 'all_timezones_set', + 'common_timezones', 'common_timezones_set', + ] + +import sys, datetime, os.path, gettext + +try: + from pkg_resources import resource_stream +except ImportError: + resource_stream = None + +from pytz.exceptions import AmbiguousTimeError +from pytz.exceptions import InvalidTimeError +from pytz.exceptions import NonExistentTimeError +from pytz.exceptions import UnknownTimeZoneError +from pytz.lazy import LazyDict, LazyList, LazySet +from pytz.tzinfo import unpickler +from pytz.tzfile import build_tzinfo, _byte_string + + +try: + unicode + +except NameError: # Python 3.x + + # Python 3.x doesn't have unicode(), making writing code + # for Python 2.3 and Python 3.x a pain. + unicode = str + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + s.encode('US-ASCII') # Raise an exception if not ASCII + return s # But return the original string - not a byte string. + +else: # Python 2.x + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii(u'Hello') + 'Hello' + >>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + return s.encode('US-ASCII') + + +def open_resource(name): + """Open a resource from the zoneinfo subdir for reading. + + Uses the pkg_resources module if available and no standard file + found at the calculated location. 
+ """ + name_parts = name.lstrip('/').split('/') + for part in name_parts: + if part == os.path.pardir or os.path.sep in part: + raise ValueError('Bad path segment: %r' % part) + filename = os.path.join(os.path.dirname(__file__), + 'zoneinfo', *name_parts) + if not os.path.exists(filename) and resource_stream is not None: + # http://bugs.launchpad.net/bugs/383171 - we avoid using this + # unless absolutely necessary to help when a broken version of + # pkg_resources is installed. + return resource_stream(__name__, 'zoneinfo/' + name) + return open(filename, 'rb') + + +def resource_exists(name): + """Return true if the given resource exists""" + try: + open_resource(name).close() + return True + except IOError: + return False + + +# Enable this when we get some translations? +# We want an i18n API that is useful to programs using Python's gettext +# module, as well as the Zope3 i18n package. Perhaps we should just provide +# the POT file and translations, and leave it up to callers to make use +# of them. +# +# t = gettext.translation( +# 'pytz', os.path.join(os.path.dirname(__file__), 'locales'), +# fallback=True +# ) +# def _(timezone_name): +# """Translate a timezone name using the current locale, returning Unicode""" +# return t.ugettext(timezone_name) + + +_tzinfo_cache = {} + +def timezone(zone): + r''' Return a datetime.tzinfo implementation for the given timezone + + >>> from datetime import datetime, timedelta + >>> utc = timezone('UTC') + >>> eastern = timezone('US/Eastern') + >>> eastern.zone + 'US/Eastern' + >>> timezone(unicode('US/Eastern')) is eastern + True + >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) + >>> loc_dt = utc_dt.astimezone(eastern) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> loc_dt.strftime(fmt) + '2002-10-27 01:00:00 EST (-0500)' + >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 00:50:00 EST (-0500)' + >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:50:00 EDT (-0400)' + >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:10:00 EST (-0500)' + + Raises UnknownTimeZoneError if passed an unknown zone. + + >>> try: + ... timezone('Asia/Shangri-La') + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + >>> try: + ... timezone(unicode('\N{TRADE MARK SIGN}')) + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + ''' + if zone.upper() == 'UTC': + return utc + + try: + zone = ascii(zone) + except UnicodeEncodeError: + # All valid timezones are ASCII + raise UnknownTimeZoneError(zone) + + zone = _unmunge_zone(zone) + if zone not in _tzinfo_cache: + if zone in all_timezones_set: + fp = open_resource(zone) + try: + _tzinfo_cache[zone] = build_tzinfo(zone, fp) + finally: + fp.close() + else: + raise UnknownTimeZoneError(zone) + + return _tzinfo_cache[zone] + + +def _unmunge_zone(zone): + """Undo the time zone name munging done by older versions of pytz.""" + return zone.replace('_plus_', '+').replace('_minus_', '-') + + +ZERO = datetime.timedelta(0) +HOUR = datetime.timedelta(hours=1) + + +class UTC(datetime.tzinfo): + """UTC + + Optimized UTC implementation. It unpickles using the single module global + instance defined beneath this class declaration. 
+ """ + zone = "UTC" + + _utcoffset = ZERO + _dst = ZERO + _tzname = zone + + def fromutc(self, dt): + if dt.tzinfo is None: + return self.localize(dt) + return super(utc.__class__, self).fromutc(dt) + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + + def __reduce__(self): + return _UTC, () + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime''' + if dt.tzinfo is self: + return dt + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.astimezone(self) + + def __repr__(self): + return "" + + def __str__(self): + return "UTC" + + +UTC = utc = UTC() # UTC is a singleton + + +def _UTC(): + """Factory function for utc unpickling. + + Makes sure that unpickling a utc instance always returns the same + module global. + + These examples belong in the UTC class above, but it is obscured; or in + the README.txt, but we are not depending on Python 2.4 so integrating + the README.txt examples with the unit tests is not trivial. + + >>> import datetime, pickle + >>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) + >>> naive = dt.replace(tzinfo=None) + >>> p = pickle.dumps(dt, 1) + >>> naive_p = pickle.dumps(naive, 1) + >>> len(p) - len(naive_p) + 17 + >>> new = pickle.loads(p) + >>> new == dt + True + >>> new is dt + False + >>> new.tzinfo is dt.tzinfo + True + >>> utc is UTC is timezone('UTC') + True + >>> utc is timezone('GMT') + False + """ + return utc +_UTC.__safe_for_unpickling__ = True + + +def _p(*args): + """Factory function for unpickling pytz tzinfo instances. + + Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle + by shortening the path. + """ + return unpickler(*args) +_p.__safe_for_unpickling__ = True + + + +class _CountryTimezoneDict(LazyDict): + """Map ISO 3166 country code to a list of timezone names commonly used + in that country. + + iso3166_code is the two letter code used to identify the country. + + >>> def print_list(list_of_strings): + ... 'We use a helper so doctests work under Python 2.3 -> 3.x' + ... for s in list_of_strings: + ... print(s) + + >>> print_list(country_timezones['nz']) + Pacific/Auckland + Pacific/Chatham + >>> print_list(country_timezones['ch']) + Europe/Zurich + >>> print_list(country_timezones['CH']) + Europe/Zurich + >>> print_list(country_timezones[unicode('ch')]) + Europe/Zurich + >>> print_list(country_timezones['XXX']) + Traceback (most recent call last): + ... + KeyError: 'XXX' + + Previously, this information was exposed as a function rather than a + dictionary. 
+    dictionary. This is still supported::
+
+    >>> print_list(country_timezones('nz'))
+    Pacific/Auckland
+    Pacific/Chatham
+    """
+    def __call__(self, iso3166_code):
+        """Backwards compatibility."""
+        return self[iso3166_code]
+
+    def _fill(self):
+        data = {}
+        zone_tab = open_resource('zone.tab')
+        try:
+            for line in zone_tab:
+                line = line.decode('US-ASCII')
+                if line.startswith('#'):
+                    continue
+                code, coordinates, zone = line.split(None, 4)[:3]
+                if zone not in all_timezones_set:
+                    continue
+                try:
+                    data[code].append(zone)
+                except KeyError:
+                    data[code] = [zone]
+            self.data = data
+        finally:
+            zone_tab.close()
+
+country_timezones = _CountryTimezoneDict()
+
+
+class _CountryNameDict(LazyDict):
+    '''Dictionary providing ISO3166 code -> English name.
+
+    >>> print(country_names['au'])
+    Australia
+    '''
+    def _fill(self):
+        data = {}
+        zone_tab = open_resource('iso3166.tab')
+        try:
+            for line in zone_tab.readlines():
+                line = line.decode('US-ASCII')
+                if line.startswith('#'):
+                    continue
+                code, name = line.split(None, 1)
+                data[code] = name.strip()
+            self.data = data
+        finally:
+            zone_tab.close()
+
+country_names = _CountryNameDict()
+
+
+# Time-zone info based solely on fixed offsets
+
+class _FixedOffset(datetime.tzinfo):
+
+    zone = None # to match the standard pytz API
+
+    def __init__(self, minutes):
+        if abs(minutes) >= 1440:
+            raise ValueError("absolute offset is too large", minutes)
+        self._minutes = minutes
+        self._offset = datetime.timedelta(minutes=minutes)
+
+    def utcoffset(self, dt):
+        return self._offset
+
+    def __reduce__(self):
+        return FixedOffset, (self._minutes, )
+
+    def dst(self, dt):
+        return ZERO
+
+    def tzname(self, dt):
+        return None
+
+    def __repr__(self):
+        return 'pytz.FixedOffset(%d)' % self._minutes
+
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time'''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+        return dt.replace(tzinfo=self)
+
+    def normalize(self, dt, is_dst=False):
+        '''Correct the timezone information on the given datetime'''
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+        return dt.replace(tzinfo=self)
+
+
+def FixedOffset(offset, _tzinfos = {}):
+    """return a fixed-offset timezone based on a number of minutes.
+
+    >>> one = FixedOffset(-330)
+    >>> one
+    pytz.FixedOffset(-330)
+    >>> one.utcoffset(datetime.datetime.now())
+    datetime.timedelta(-1, 66600)
+    >>> one.dst(datetime.datetime.now())
+    datetime.timedelta(0)
+
+    >>> two = FixedOffset(1380)
+    >>> two
+    pytz.FixedOffset(1380)
+    >>> two.utcoffset(datetime.datetime.now())
+    datetime.timedelta(0, 82800)
+    >>> two.dst(datetime.datetime.now())
+    datetime.timedelta(0)
+
+    The datetime.timedelta must be between the range of -1 and 1 day,
+    non-inclusive.
+
+    >>> FixedOffset(1440)
+    Traceback (most recent call last):
+    ...
+    ValueError: ('absolute offset is too large', 1440)
+
+    >>> FixedOffset(-1440)
+    Traceback (most recent call last):
+    ...
+    ValueError: ('absolute offset is too large', -1440)
+
+    An offset of 0 is special-cased to return UTC.
+
+    >>> FixedOffset(0) is UTC
+    True
+
+    There should always be only one instance of a FixedOffset per timedelta.
+    This should be true for multiple creation calls.
+
+    >>> FixedOffset(-330) is one
+    True
+    >>> FixedOffset(1380) is two
+    True
+
+    It should also be true for pickling.
+ + >>> import pickle + >>> pickle.loads(pickle.dumps(one)) is one + True + >>> pickle.loads(pickle.dumps(two)) is two + True + """ + if offset == 0: + return UTC + + info = _tzinfos.get(offset) + if info is None: + # We haven't seen this one before. we need to save it. + + # Use setdefault to avoid a race condition and make sure we have + # only one + info = _tzinfos.setdefault(offset, _FixedOffset(offset)) + + return info + +FixedOffset.__safe_for_unpickling__ = True + + +def _test(): + import doctest, os, sys + sys.path.insert(0, os.pardir) + import pytz + return doctest.testmod(pytz) + +if __name__ == '__main__': + _test() + +all_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Asmera', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Timbuktu', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/ComodRivadavia', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 'America/Atka', + 'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Buenos_Aires', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Catamarca', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Coral_Harbour', + 'America/Cordoba', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Ensenada', + 'America/Fort_Wayne', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 
'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Indianapolis', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Jujuy', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Knox_IN', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Louisville', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Mendoza', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Acre', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Rosario', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Shiprock', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Virgin', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/South_Pole', + 'Antarctica/Syowa', + 'Antarctica/Troll', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Ashkhabad', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Calcutta', + 'Asia/Chita', + 'Asia/Choibalsan', + 'Asia/Chongqing', + 'Asia/Chungking', + 'Asia/Colombo', + 'Asia/Dacca', + 'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Harbin', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Istanbul', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kashgar', + 'Asia/Kathmandu', + 'Asia/Katmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macao', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', 
+ 'Asia/Pontianak', + 'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Saigon', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Srednekolymsk', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Tel_Aviv', + 'Asia/Thimbu', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ujung_Pandang', + 'Asia/Ulaanbaatar', + 'Asia/Ulan_Bator', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faeroe', + 'Atlantic/Faroe', + 'Atlantic/Jan_Mayen', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/ACT', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Canberra', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/LHI', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/NSW', + 'Australia/North', + 'Australia/Perth', + 'Australia/Queensland', + 'Australia/South', + 'Australia/Sydney', + 'Australia/Tasmania', + 'Australia/Victoria', + 'Australia/West', + 'Australia/Yancowinna', + 'Brazil/Acre', + 'Brazil/DeNoronha', + 'Brazil/East', + 'Brazil/West', + 'CET', + 'CST6CDT', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/East-Saskatchewan', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Canada/Saskatchewan', + 'Canada/Yukon', + 'Chile/Continental', + 'Chile/EasterIsland', + 'Cuba', + 'EET', + 'EST', + 'EST5EDT', + 'Egypt', + 'Eire', + 'Etc/GMT', + 'Etc/GMT+0', + 'Etc/GMT+1', + 'Etc/GMT+10', + 'Etc/GMT+11', + 'Etc/GMT+12', + 'Etc/GMT+2', + 'Etc/GMT+3', + 'Etc/GMT+4', + 'Etc/GMT+5', + 'Etc/GMT+6', + 'Etc/GMT+7', + 'Etc/GMT+8', + 'Etc/GMT+9', + 'Etc/GMT-0', + 'Etc/GMT-1', + 'Etc/GMT-10', + 'Etc/GMT-11', + 'Etc/GMT-12', + 'Etc/GMT-13', + 'Etc/GMT-14', + 'Etc/GMT-2', + 'Etc/GMT-3', + 'Etc/GMT-4', + 'Etc/GMT-5', + 'Etc/GMT-6', + 'Etc/GMT-7', + 'Etc/GMT-8', + 'Etc/GMT-9', + 'Etc/GMT0', + 'Etc/Greenwich', + 'Etc/UCT', + 'Etc/UTC', + 'Etc/Universal', + 'Etc/Zulu', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belfast', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Nicosia', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Tiraspol', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GB', + 'GB-Eire', + 'GMT', + 
'GMT+0', + 'GMT-0', + 'GMT0', + 'Greenwich', + 'HST', + 'Hongkong', + 'Iceland', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Iran', + 'Israel', + 'Jamaica', + 'Japan', + 'Kwajalein', + 'Libya', + 'MET', + 'MST', + 'MST7MDT', + 'Mexico/BajaNorte', + 'Mexico/BajaSur', + 'Mexico/General', + 'NZ', + 'NZ-CHAT', + 'Navajo', + 'PRC', + 'PST8PDT', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Bougainville', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Ponape', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Samoa', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Truk', + 'Pacific/Wake', + 'Pacific/Wallis', + 'Pacific/Yap', + 'Poland', + 'Portugal', + 'ROC', + 'ROK', + 'Singapore', + 'Turkey', + 'UCT', + 'US/Alaska', + 'US/Aleutian', + 'US/Arizona', + 'US/Central', + 'US/East-Indiana', + 'US/Eastern', + 'US/Hawaii', + 'US/Indiana-Starke', + 'US/Michigan', + 'US/Mountain', + 'US/Pacific', + 'US/Pacific-New', + 'US/Samoa', + 'UTC', + 'Universal', + 'W-SU', + 'WET', + 'Zulu'] +all_timezones = LazyList( + tz for tz in all_timezones if resource_exists(tz)) + +all_timezones_set = LazySet(all_timezones) +common_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 
'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/Syowa', + 'Antarctica/Troll', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Chita', + 'Asia/Choibalsan', + 'Asia/Colombo', + 
'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kathmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', + 'Asia/Pontianak', + 'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Srednekolymsk', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ulaanbaatar', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faroe', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/Perth', + 'Australia/Sydney', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GMT', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Bougainville', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 
'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Wake', + 'Pacific/Wallis', + 'US/Alaska', + 'US/Arizona', + 'US/Central', + 'US/Eastern', + 'US/Hawaii', + 'US/Mountain', + 'US/Pacific', + 'UTC'] +common_timezones = LazyList( + tz for tz in common_timezones if tz in all_timezones) + +common_timezones_set = LazySet(common_timezones) diff --git a/lib/pytz/exceptions.py b/lib/pytz/exceptions.py new file mode 100644 index 00000000..0376108e --- /dev/null +++ b/lib/pytz/exceptions.py @@ -0,0 +1,48 @@ +''' +Custom exceptions raised by pytz. +''' + +__all__ = [ + 'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError', + 'NonExistentTimeError', + ] + + +class UnknownTimeZoneError(KeyError): + '''Exception raised when pytz is passed an unknown timezone. + + >>> isinstance(UnknownTimeZoneError(), LookupError) + True + + This class is actually a subclass of KeyError to provide backwards + compatibility with code relying on the undocumented behavior of earlier + pytz releases. + + >>> isinstance(UnknownTimeZoneError(), KeyError) + True + ''' + pass + + +class InvalidTimeError(Exception): + '''Base class for invalid time exceptions.''' + + +class AmbiguousTimeError(InvalidTimeError): + '''Exception raised when attempting to create an ambiguous wallclock time. + + At the end of a DST transition period, a particular wallclock time will + occur twice (once before the clocks are set back, once after). Both + possibilities may be correct, unless further information is supplied. + + See DstTzInfo.normalize() for more info + ''' + + +class NonExistentTimeError(InvalidTimeError): + '''Exception raised when attempting to create a wallclock time that + cannot exist. + + At the start of a DST transition period, the wallclock time jumps forward. + The instants jumped over never occur. + ''' diff --git a/lib/pytz/lazy.py b/lib/pytz/lazy.py new file mode 100644 index 00000000..f7fc597c --- /dev/null +++ b/lib/pytz/lazy.py @@ -0,0 +1,168 @@ +from threading import RLock +try: + from UserDict import DictMixin +except ImportError: + from collections import Mapping as DictMixin + + +# With lazy loading, we might end up with multiple threads triggering +# it at the same time. We need a lock. 
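The comment above names the pattern the lazy containers in this file rely on: double-checked locking, where the fast path skips the lock entirely and the state is re-checked once the lock is held. A minimal sketch of the idiom (illustration only, not part of the patch; `LazyConfig` is a made-up stand-in for `LazyDict`):

```python
# Sketch of the double-checked locking idiom used by pytz's lazy containers.
from threading import RLock

_fill_lock = RLock()

class LazyConfig:
    data = None

    def __getitem__(self, key):
        if self.data is None:            # fast path: no lock if already filled
            with _fill_lock:
                if self.data is None:    # re-check: another thread may have won
                    self.data = {'tz': 'UTC'}   # the expensive load goes here
        return self.data[key]

print(LazyConfig()['tz'])   # 'UTC' - filled exactly once, thread-safely
```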
+_fill_lock = RLock() + + +class LazyDict(DictMixin): + """Dictionary populated on first use.""" + data = None + def __getitem__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data[key.upper()] + + def __contains__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return key in self.data + + def __iter__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return iter(self.data) + + def __len__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return len(self.data) + + def keys(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data.keys() + + +class LazyList(list): + """List populated on first use.""" + + _props = [ + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + 'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove', + 'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__', + '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__reversed__', '__getslice__', '__setslice__', '__delslice__'] + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return list() + + # We need a new class as we will be dynamically messing with its + # methods. + class LazyList(list): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + list.extend(self, fill_iter.pop()) + for method_name in cls._props: + delattr(LazyList, method_name) + finally: + _fill_lock.release() + return getattr(list, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazyList, name, lazy(name)) + + new_list = LazyList() + return new_list + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. 
+LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)] + + +class LazySet(set): + """Set populated on first use.""" + + _props = ( + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__sub__', '__and__', '__xor__', '__or__', + '__rsub__', '__rand__', '__rxor__', '__ror__', + '__isub__', '__iand__', '__ixor__', '__ior__', + 'add', 'clear', 'copy', 'difference', 'difference_update', + 'discard', 'intersection', 'intersection_update', 'isdisjoint', + 'issubset', 'issuperset', 'pop', 'remove', + 'symmetric_difference', 'symmetric_difference_update', + 'union', 'update') + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return set() + + class LazySet(set): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + for i in fill_iter.pop(): + set.add(self, i) + for method_name in cls._props: + delattr(LazySet, method_name) + finally: + _fill_lock.release() + return getattr(set, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazySet, name, lazy(name)) + + new_set = LazySet() + return new_set + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. +LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)] diff --git a/lib/pytz/reference.py b/lib/pytz/reference.py new file mode 100644 index 00000000..3dda13e7 --- /dev/null +++ b/lib/pytz/reference.py @@ -0,0 +1,127 @@ +''' +Reference tzinfo implementations from the Python docs. +Used for testing against as they are only correct for the years +1987 to 2006. Do not use these for real code. +''' + +from datetime import tzinfo, timedelta, datetime +from pytz import utc, UTC, HOUR, ZERO + +# A class building tzinfo objects for fixed-offset time zones. +# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes = offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + +# A class capturing the platform's idea of local time. + +import time as _time + +STDOFFSET = timedelta(seconds = -_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds = -_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + +class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + +# A complete implementation of current DST rules for major US time zones. + +def first_sunday_on_or_after(dt): + days_to_go = 6 - dt.weekday() + if days_to_go: + dt += timedelta(days_to_go) + return dt + +# In the US, DST starts at 2am (standard time) on the first Sunday in April. 
+DSTSTART = datetime(1, 4, 1, 2)
+# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
+# which is the first Sunday on or after Oct 25.
+DSTEND = datetime(1, 10, 25, 1)
+
+class USTimeZone(tzinfo):
+
+    def __init__(self, hours, reprname, stdname, dstname):
+        self.stdoffset = timedelta(hours=hours)
+        self.reprname = reprname
+        self.stdname = stdname
+        self.dstname = dstname
+
+    def __repr__(self):
+        return self.reprname
+
+    def tzname(self, dt):
+        if self.dst(dt):
+            return self.dstname
+        else:
+            return self.stdname
+
+    def utcoffset(self, dt):
+        return self.stdoffset + self.dst(dt)
+
+    def dst(self, dt):
+        if dt is None or dt.tzinfo is None:
+            # An exception may be sensible here, in one or both cases.
+            # It depends on how you want to treat them. The default
+            # fromutc() implementation (called by the default astimezone()
+            # implementation) passes a datetime with dt.tzinfo is self.
+            return ZERO
+        assert dt.tzinfo is self
+
+        # Find first Sunday in April & the last in October.
+        start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
+        end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))
+
+        # Can't compare naive to aware objects, so strip the timezone from
+        # dt first.
+        if start <= dt.replace(tzinfo=None) < end:
+            return HOUR
+        else:
+            return ZERO
+
+Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
+Central = USTimeZone(-6, "Central", "CST", "CDT")
+Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
+Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
+
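The reference classes above can be exercised directly. A quick illustration (not part of the patch), assuming the vendored `lib/` directory is on `sys.path` so `pytz.reference` resolves:

```python
# Using the reference FixedOffset and USTimeZone classes defined above.
from datetime import datetime
from pytz.reference import FixedOffset, Eastern

ist = FixedOffset(330, 'IST')                    # a fixed UTC+05:30 zone
dt = datetime(2000, 6, 1, 12, 0, tzinfo=ist)
print(dt.isoformat())                            # 2000-06-01T12:00:00+05:30

# June 2000 falls inside the first-Sunday-April .. last-Sunday-Oct window,
# so the US reference rules report daylight time.
print(datetime(2000, 6, 1, tzinfo=Eastern).tzname())   # 'EDT'
```

As the module docstring says, these rules are only correct for roughly 1987-2006; they exist for testing, not production use.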
diff --git a/lib/pytz/tzfile.py b/lib/pytz/tzfile.py
new file mode 100644
index 00000000..9c007c80
--- /dev/null
+++ b/lib/pytz/tzfile.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+'''
+$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
+'''
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from io import StringIO
+from datetime import datetime, timedelta
+from struct import unpack, calcsize
+
+from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
+from pytz.tzinfo import memorized_datetime, memorized_timedelta
+
+def _byte_string(s):
+    """Cast a string or byte string to an ASCII byte string."""
+    return s.encode('US-ASCII')
+
+_NULL = _byte_string('\0')
+
+def _std_string(s):
+    """Cast a string or byte string to an ASCII string."""
+    return str(s.decode('US-ASCII'))
+
+def build_tzinfo(zone, fp):
+    head_fmt = '>4s c 15x 6l'
+    head_size = calcsize(head_fmt)
+    (magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt,
+     typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
+
+    # Make sure it is a tzfile(5) file
+    assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
+
+    # Read out the transition times, localtime indices and ttinfo structures.
+    data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
+        timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
+    data_size = calcsize(data_fmt)
+    data = unpack(data_fmt, fp.read(data_size))
+
+    # make sure we unpacked the right number of values
+    assert len(data) == 2 * timecnt + 3 * typecnt + 1
+    transitions = [memorized_datetime(trans)
+                   for trans in data[:timecnt]]
+    lindexes = list(data[timecnt:2 * timecnt])
+    ttinfo_raw = data[2 * timecnt:-1]
+    tznames_raw = data[-1]
+    del data
+
+    # Process ttinfo into separate structs
+    ttinfo = []
+    tznames = {}
+    i = 0
+    while i < len(ttinfo_raw):
+        # have we looked up this timezone name yet?
+        tzname_offset = ttinfo_raw[i+2]
+        if tzname_offset not in tznames:
+            nul = tznames_raw.find(_NULL, tzname_offset)
+            if nul < 0:
+                nul = len(tznames_raw)
+            tznames[tzname_offset] = _std_string(
+                tznames_raw[tzname_offset:nul])
+        ttinfo.append((ttinfo_raw[i],
+                       bool(ttinfo_raw[i+1]),
+                       tznames[tzname_offset]))
+        i += 3
+
+    # Now build the timezone object
+    if len(transitions) == 0:
+        ttinfo[0][0], ttinfo[0][2]
+        cls = type(zone, (StaticTzInfo,), dict(
+            zone=zone,
+            _utcoffset=memorized_timedelta(ttinfo[0][0]),
+            _tzname=ttinfo[0][2]))
+    else:
+        # Early dates use the first standard time ttinfo
+        i = 0
+        while ttinfo[i][1]:
+            i += 1
+        if ttinfo[i] == ttinfo[lindexes[0]]:
+            transitions[0] = datetime.min
+        else:
+            transitions.insert(0, datetime.min)
+            lindexes.insert(0, i)
+
+        # calculate transition info
+        transition_info = []
+        for i in range(len(transitions)):
+            inf = ttinfo[lindexes[i]]
+            utcoffset = inf[0]
+            if not inf[1]:
+                dst = 0
+            else:
+                for j in range(i-1, -1, -1):
+                    prev_inf = ttinfo[lindexes[j]]
+                    if not prev_inf[1]:
+                        break
+                dst = inf[0] - prev_inf[0] # dst offset
+
+                # Bad dst? Look further. DST > 24 hours happens when
+                # a timezone has moved across the international dateline.
+                if dst <= 0 or dst > 3600*3:
+                    for j in range(i+1, len(transitions)):
+                        stdinf = ttinfo[lindexes[j]]
+                        if not stdinf[1]:
+                            dst = inf[0] - stdinf[0]
+                            if dst > 0:
+                                break # Found a useful std time.
+
+            tzname = inf[2]
+
+            # Round utcoffset and dst to the nearest minute or the
+            # datetime library will complain. Conversions to these timezones
+            # might be up to plus or minus 30 seconds out, but it is
+            # the best we can do.
+            utcoffset = int((utcoffset + 30) // 60) * 60
+            dst = int((dst + 30) // 60) * 60
+            transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
+
+        cls = type(zone, (DstTzInfo,), dict(
+            zone=zone,
+            _utc_transition_times=transitions,
+            _transition_info=transition_info))
+
+    return cls()
+
+if __name__ == '__main__':
+    import os.path
+    from pprint import pprint
+    base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
+    tz = build_tzinfo('Australia/Melbourne',
+                      open(os.path.join(base, 'Australia', 'Melbourne'), 'rb'))
+    tz = build_tzinfo('US/Eastern',
+                      open(os.path.join(base, 'US', 'Eastern'), 'rb'))
+    pprint(tz._utc_transition_times)
+    #print tz.asPython(4)
+    #print tz.transitions_mapping
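For reference, the fixed 44-byte header that `build_tzinfo()` unpacks first can be inspected directly. A sketch (not part of the patch), assuming the compiled zoneinfo files added later in this diff are present on disk:

```python
# Peeking at the tzfile(5) header with the same format string as build_tzinfo.
from struct import calcsize, unpack

head_fmt = '>4s c 15x 6l'       # magic, version, 15 pad bytes, six counts
with open('lib/pytz/zoneinfo/Africa/Abidjan', 'rb') as fp:
    fields = unpack(head_fmt, fp.read(calcsize(head_fmt)))

magic, fmt, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt, typecnt, charcnt = fields
print(magic)    # b'TZif' - the tzfile(5) magic number the assert checks for
print(timecnt)  # how many UTC transition times follow the header
```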
diff --git a/lib/pytz/tzinfo.py b/lib/pytz/tzinfo.py
new file mode 100644
index 00000000..1318872d
--- /dev/null
+++ b/lib/pytz/tzinfo.py
@@ -0,0 +1,564 @@
+'''Base classes and helpers for building zone specific tzinfo classes'''
+
+from datetime import datetime, timedelta, tzinfo
+from bisect import bisect_right
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+import pytz
+from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError
+
+__all__ = []
+
+_timedelta_cache = {}
+def memorized_timedelta(seconds):
+    '''Create only one instance of each distinct timedelta'''
+    try:
+        return _timedelta_cache[seconds]
+    except KeyError:
+        delta = timedelta(seconds=seconds)
+        _timedelta_cache[seconds] = delta
+        return delta
+
+_epoch = datetime.utcfromtimestamp(0)
+_datetime_cache = {0: _epoch}
+def memorized_datetime(seconds):
+    '''Create only one instance of each distinct datetime'''
+    try:
+        return _datetime_cache[seconds]
+    except KeyError:
+        # NB. We can't just do datetime.utcfromtimestamp(seconds) as this
+        # fails with negative values under Windows (Bug #90096)
+        dt = _epoch + timedelta(seconds=seconds)
+        _datetime_cache[seconds] = dt
+        return dt
+
+_ttinfo_cache = {}
+def memorized_ttinfo(*args):
+    '''Create only one instance of each distinct tuple'''
+    try:
+        return _ttinfo_cache[args]
+    except KeyError:
+        ttinfo = (
+            memorized_timedelta(args[0]),
+            memorized_timedelta(args[1]),
+            args[2]
+            )
+        _ttinfo_cache[args] = ttinfo
+        return ttinfo
+
+_notime = memorized_timedelta(0)
+
+def _to_seconds(td):
+    '''Convert a timedelta to seconds'''
+    return td.seconds + td.days * 24 * 60 * 60
+
+
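The `memorized_*` helpers above intern values: every distinct offset or transition time is built once and shared, which keeps the thousands of transition records loaded from the zone files small and makes identity-based comparisons cheap. A sketch (not part of the patch), assuming the vendored `lib/` directory is on `sys.path`:

```python
# The memorization caches hand back the same object for the same input.
from pytz.tzinfo import memorized_timedelta

a = memorized_timedelta(3600)
b = memorized_timedelta(3600)
print(a is b)   # True - one cached instance, not merely two equal ones
```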
+class BaseTzInfo(tzinfo):
+    # Overridden in subclass
+    _utcoffset = None
+    _tzname = None
+    zone = None
+
+    def __str__(self):
+        return self.zone
+
+
+class StaticTzInfo(BaseTzInfo):
+    '''A timezone that has a constant offset from UTC
+
+    These timezones are rare, as most locations have changed their
+    offset at some point in their history
+    '''
+    def fromutc(self, dt):
+        '''See datetime.tzinfo.fromutc'''
+        if dt.tzinfo is not None and dt.tzinfo is not self:
+            raise ValueError('fromutc: dt.tzinfo is not self')
+        return (dt + self._utcoffset).replace(tzinfo=self)
+
+    def utcoffset(self, dt, is_dst=None):
+        '''See datetime.tzinfo.utcoffset
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return self._utcoffset
+
+    def dst(self, dt, is_dst=None):
+        '''See datetime.tzinfo.dst
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return _notime
+
+    def tzname(self, dt, is_dst=None):
+        '''See datetime.tzinfo.tzname
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return self._tzname
+
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time'''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+        return dt.replace(tzinfo=self)
+
+    def normalize(self, dt, is_dst=False):
+        '''Correct the timezone information on the given datetime.
+
+        This is normally a no-op, as StaticTzInfo timezones never have
+        ambiguous cases to correct:
+
+        >>> from pytz import timezone
+        >>> gmt = timezone('GMT')
+        >>> isinstance(gmt, StaticTzInfo)
+        True
+        >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt)
+        >>> gmt.normalize(dt) is dt
+        True
+
+        The supported method of converting between timezones is to use
+        datetime.astimezone(). Currently normalize() also works:
+
+        >>> la = timezone('America/Los_Angeles')
+        >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3))
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> gmt.normalize(dt).strftime(fmt)
+        '2011-05-07 08:02:03 GMT (+0000)'
+        '''
+        if dt.tzinfo is self:
+            return dt
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+        return dt.astimezone(self)
+
+    def __repr__(self):
+        return '<StaticTzInfo %r>' % (self.zone,)
+
+    def __reduce__(self):
+        # Special pickle so the zone remains a singleton and to cope with
+        # database changes.
+        return pytz._p, (self.zone,)
+
+
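Since a StaticTzInfo has one constant offset, `localize()` only has to attach it; there is no DST ambiguity to resolve. A sketch (not part of the patch), again assuming the vendored `lib/` is importable; the normalize() doctest above confirms that 'GMT' is served by StaticTzInfo:

```python
# StaticTzInfo: localize simply pins the fixed offset to a naive datetime.
from datetime import datetime
import pytz

gmt = pytz.timezone('GMT')                 # a StaticTzInfo instance
dt = gmt.localize(datetime(2017, 1, 1, 12, 0))
print(dt.utcoffset())                      # 0:00:00 - constant for any date
```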
+class DstTzInfo(BaseTzInfo):
+    '''A timezone that has a variable offset from UTC
+
+    The offset might change if daylight saving time comes into effect,
+    or at a point in history when the region decides to change its
+    timezone definition.
+    '''
+    # Overridden in subclass
+    _utc_transition_times = None # Sorted list of DST transition times in UTC
+    _transition_info = None # [(utcoffset, dstoffset, tzname)] corresponding
+                            # to _utc_transition_times entries
+    zone = None
+
+    # Set in __init__
+    _tzinfos = None
+    _dst = None # DST offset
+
+    def __init__(self, _inf=None, _tzinfos=None):
+        if _inf:
+            self._tzinfos = _tzinfos
+            self._utcoffset, self._dst, self._tzname = _inf
+        else:
+            _tzinfos = {}
+            self._tzinfos = _tzinfos
+            self._utcoffset, self._dst, self._tzname = self._transition_info[0]
+            _tzinfos[self._transition_info[0]] = self
+            for inf in self._transition_info[1:]:
+                if inf not in _tzinfos:
+                    _tzinfos[inf] = self.__class__(inf, _tzinfos)
+
+    def fromutc(self, dt):
+        '''See datetime.tzinfo.fromutc'''
+        if (dt.tzinfo is not None
+            and getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos):
+            raise ValueError('fromutc: dt.tzinfo is not self')
+        dt = dt.replace(tzinfo=None)
+        idx = max(0, bisect_right(self._utc_transition_times, dt) - 1)
+        inf = self._transition_info[idx]
+        return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf])
+
+    def normalize(self, dt):
+        '''Correct the timezone information on the given datetime
+
+        If date arithmetic crosses DST boundaries, the tzinfo
+        is not magically adjusted. This method normalizes the
+        tzinfo to the correct one.
+
+        To test, first we need to do some setup
+
+        >>> from pytz import timezone
+        >>> utc = timezone('UTC')
+        >>> eastern = timezone('US/Eastern')
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+
+        We next create a datetime right on an end-of-DST transition point,
+        the instant when the wallclocks are wound back one hour.
+
+        >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+        >>> loc_dt = utc_dt.astimezone(eastern)
+        >>> loc_dt.strftime(fmt)
+        '2002-10-27 01:00:00 EST (-0500)'
+
+        Now, if we subtract a few minutes from it, note that the timezone
+        information has not changed.
+
+        >>> before = loc_dt - timedelta(minutes=10)
+        >>> before.strftime(fmt)
+        '2002-10-27 00:50:00 EST (-0500)'
+
+        But we can fix that by calling the normalize method
+
+        >>> before = eastern.normalize(before)
+        >>> before.strftime(fmt)
+        '2002-10-27 01:50:00 EDT (-0400)'
+
+        The supported method of converting between timezones is to use
+        datetime.astimezone(). Currently, normalize() also works:
+
+        >>> th = timezone('Asia/Bangkok')
+        >>> am = timezone('Europe/Amsterdam')
+        >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3))
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> am.normalize(dt).strftime(fmt)
+        '2011-05-06 20:02:03 CEST (+0200)'
+        '''
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+
+        # Convert dt in localtime to UTC
+        offset = dt.tzinfo._utcoffset
+        dt = dt.replace(tzinfo=None)
+        dt = dt - offset
+        # convert it back, and return it
+        return self.fromutc(dt)
+
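As `__init__` above shows, each zone keeps one frozen tzinfo instance per distinct (utcoffset, dst, tzname) state, all sharing a single `_tzinfos` map; `fromutc()` and `normalize()` just pick the right instance. A sketch poking at that internal map (not part of the patch, and `_tzinfos` is a private attribute, so this is for illustration only):

```python
# Each DstTzInfo zone shares one _tzinfos map of frozen offset states.
import pytz

eastern = pytz.timezone('US/Eastern')
print(len(eastern._tzinfos))                       # number of distinct states
print(sorted({info[2] for info in eastern._tzinfos}))  # their abbreviations
```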
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time.
+
+        This method should be used to construct localtimes, rather
+        than passing a tzinfo argument to a datetime constructor.
+
+        is_dst is used to determine the correct timezone in the ambiguous
+        period at the end of daylight saving time.
+
+        >>> from pytz import timezone
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> amdam = timezone('Europe/Amsterdam')
+        >>> dt = datetime(2004, 10, 31, 2, 0, 0)
+        >>> loc_dt1 = amdam.localize(dt, is_dst=True)
+        >>> loc_dt2 = amdam.localize(dt, is_dst=False)
+        >>> loc_dt1.strftime(fmt)
+        '2004-10-31 02:00:00 CEST (+0200)'
+        >>> loc_dt2.strftime(fmt)
+        '2004-10-31 02:00:00 CET (+0100)'
+        >>> str(loc_dt2 - loc_dt1)
+        '1:00:00'
+
+        Use is_dst=None to raise an AmbiguousTimeError for ambiguous
+        times at the end of daylight saving time
+
+        >>> try:
+        ...     loc_dt1 = amdam.localize(dt, is_dst=None)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        is_dst defaults to False
+
+        >>> amdam.localize(dt) == amdam.localize(dt, False)
+        True
+
+        is_dst is also used to determine the correct timezone in the
+        wallclock times jumped over at the start of daylight saving time.
+
+        >>> pacific = timezone('US/Pacific')
+        >>> dt = datetime(2008, 3, 9, 2, 0, 0)
+        >>> ploc_dt1 = pacific.localize(dt, is_dst=True)
+        >>> ploc_dt2 = pacific.localize(dt, is_dst=False)
+        >>> ploc_dt1.strftime(fmt)
+        '2008-03-09 02:00:00 PDT (-0700)'
+        >>> ploc_dt2.strftime(fmt)
+        '2008-03-09 02:00:00 PST (-0800)'
+        >>> str(ploc_dt2 - ploc_dt1)
+        '1:00:00'
+
+        Use is_dst=None to raise a NonExistentTimeError for these skipped
+        times.
+
+        >>> try:
+        ...     loc_dt1 = pacific.localize(dt, is_dst=None)
+        ... except NonExistentTimeError:
+        ...     print('Non-existent')
+        Non-existent
+        '''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+
+        # Find the two best possibilities.
+        possible_loc_dt = set()
+        for delta in [timedelta(days=-1), timedelta(days=1)]:
+            loc_dt = dt + delta
+            idx = max(0, bisect_right(
+                self._utc_transition_times, loc_dt) - 1)
+            inf = self._transition_info[idx]
+            tzinfo = self._tzinfos[inf]
+            loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo))
+            if loc_dt.replace(tzinfo=None) == dt:
+                possible_loc_dt.add(loc_dt)
+
+        if len(possible_loc_dt) == 1:
+            return possible_loc_dt.pop()
+
+        # If there are no possibly correct timezones, we are attempting
+        # to convert a time that never happened - the time period jumped
+        # during the start-of-DST transition period.
+        if len(possible_loc_dt) == 0:
+            # If we refuse to guess, raise an exception.
+            if is_dst is None:
+                raise NonExistentTimeError(dt)
+
+            # If we are forcing the pre-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock forward a few
+            # hours.
+            elif is_dst:
+                return self.localize(
+                    dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)
+
+            # If we are forcing the post-DST side of the DST transition, we
+            # obtain the correct timezone by winding the clock back.
+            else:
+                return self.localize(
+                    dt - timedelta(hours=6), is_dst=False) + timedelta(hours=6)
+
+        # If we get this far, we have multiple possible timezones - this
+        # is an ambiguous case occurring during the end-of-DST transition.
+
+        # If told to be strict, raise an exception since we have an
+        # ambiguous case
+        if is_dst is None:
+            raise AmbiguousTimeError(dt)
+
+        # Filter out the possibilities that don't match the requested
+        # is_dst
+        filtered_possible_loc_dt = [
+            p for p in possible_loc_dt
+            if bool(p.tzinfo._dst) == is_dst
+            ]
+
+        # Hopefully we only have one possibility left. Return it.
+        if len(filtered_possible_loc_dt) == 1:
+            return filtered_possible_loc_dt[0]
+
+        if len(filtered_possible_loc_dt) == 0:
+            filtered_possible_loc_dt = list(possible_loc_dt)
+
+        # If we get this far, we are in a weird timezone transition
+        # where the clocks have been wound back but is_dst is the same
+        # in both (eg. Europe/Warsaw 1915 when they switched to CET).
+        # At this point, we just have to guess unless we allow more
+        # hints to be passed in (such as the UTC offset or abbreviation),
+        # but that is just getting silly.
+        #
+        # Choose the earliest (by UTC) applicable timezone if is_dst=True
+        # Choose the latest (by UTC) applicable timezone if is_dst=False
+        # i.e., behave like end-of-DST transition
+        dates = {} # utc -> local
+        for local_dt in filtered_possible_loc_dt:
+            utc_time = local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset
+            assert utc_time not in dates
+            dates[utc_time] = local_dt
+        return dates[[min, max][not is_dst](dates)]
+
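The docstring's opening advice matters in practice: passing a DstTzInfo to the datetime constructor pins the zone's *first* transition state (the early-dates local-mean-time entry built by tzfile.py) instead of resolving the correct one. A sketch of the contrast (not part of the patch):

```python
# localize() resolves the right offset state; the constructor does not.
from datetime import datetime
import pytz

eastern = pytz.timezone('US/Eastern')
good = eastern.localize(datetime(2016, 7, 1, 12, 0))
bad = datetime(2016, 7, 1, 12, 0, tzinfo=eastern)
print(good.strftime('%Z %z'))   # EDT -0400, the correct summer offset
print(bad.strftime('%Z %z'))    # LMT with an odd pre-1883 offset - wrong
```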
+    def utcoffset(self, dt, is_dst=None):
+        '''See datetime.tzinfo.utcoffset
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.utcoffset(ambiguous, is_dst=False)
+        datetime.timedelta(-1, 73800)
+
+        >>> tz.utcoffset(ambiguous, is_dst=True)
+        datetime.timedelta(-1, 77400)
+
+        >>> try:
+        ...     tz.utcoffset(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._utcoffset
+        else:
+            return self._utcoffset
+
+    def dst(self, dt, is_dst=None):
+        '''See datetime.tzinfo.dst
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> tz.dst(normal)
+        datetime.timedelta(0, 3600)
+        >>> tz.dst(normal, is_dst=False)
+        datetime.timedelta(0, 3600)
+        >>> tz.dst(normal, is_dst=True)
+        datetime.timedelta(0, 3600)
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.dst(ambiguous, is_dst=False)
+        datetime.timedelta(0)
+        >>> tz.dst(ambiguous, is_dst=True)
+        datetime.timedelta(0, 3600)
+        >>> try:
+        ...     tz.dst(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+
+        '''
+        if dt is None:
+            return None
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._dst
+        else:
+            return self._dst
+
+    def tzname(self, dt, is_dst=None):
+        '''See datetime.tzinfo.tzname
+
+        The is_dst parameter may be used to remove ambiguity during DST
+        transitions.
+
+        >>> from pytz import timezone
+        >>> tz = timezone('America/St_Johns')
+
+        >>> normal = datetime(2009, 9, 1)
+
+        >>> tz.tzname(normal)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=False)
+        'NDT'
+        >>> tz.tzname(normal, is_dst=True)
+        'NDT'
+
+        >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+        >>> tz.tzname(ambiguous, is_dst=False)
+        'NST'
+        >>> tz.tzname(ambiguous, is_dst=True)
+        'NDT'
+        >>> try:
+        ...     tz.tzname(ambiguous)
+        ... except AmbiguousTimeError:
+        ...     print('Ambiguous')
+        Ambiguous
+        '''
+        if dt is None:
+            return self.zone
+        elif dt.tzinfo is not self:
+            dt = self.localize(dt, is_dst)
+            return dt.tzinfo._tzname
+        else:
+            return self._tzname
+
+    def __repr__(self):
+        if self._dst:
+            dst = 'DST'
+        else:
+            dst = 'STD'
+        if self._utcoffset > _notime:
+            return '<DstTzInfo %r %s+%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+        else:
+            return '<DstTzInfo %r %s%s %s>' % (
+                self.zone, self._tzname, self._utcoffset, dst
+            )
+
+    def __reduce__(self):
+        # Special pickle so the zone remains a singleton and to cope with
+        # database changes.
+        return pytz._p, (
+            self.zone,
+            _to_seconds(self._utcoffset),
+            _to_seconds(self._dst),
+            self._tzname
+        )
+
+
+
+def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
+    """Factory function for unpickling pytz tzinfo instances.
+
+    This is shared for both StaticTzInfo and DstTzInfo instances, because
+    database changes could cause a zone's implementation to switch between
+    these two base classes and we can't break pickles on a pytz version
+    upgrade.
+    """
+    # Raises a KeyError if zone no longer exists, which should never happen
+    # and would be a bug.
+    tz = pytz.timezone(zone)
+
+    # A StaticTzInfo - just return it
+    if utcoffset is None:
+        return tz
+
+    # This pickle was created from a DstTzInfo. We need to
+    # determine which of the list of tzinfo instances for this zone
+    # to use in order to restore the state of any datetime instances using
+    # it correctly.
+    utcoffset = memorized_timedelta(utcoffset)
+    dstoffset = memorized_timedelta(dstoffset)
+    try:
+        return tz._tzinfos[(utcoffset, dstoffset, tzname)]
+    except KeyError:
+        # The particular state requested in this timezone no longer exists.
+        # This indicates a corrupt pickle, or the timezone database has been
+        # corrected violently enough to make this particular
+        # (utcoffset,dstoffset) no longer exist in the zone, or the
+        # abbreviation has been changed.
+        pass
+
+    # See if we can find an entry differing only by tzname. Abbreviations
+    # get changed from the initial guess by the database maintainers to
+    # match reality when this information is discovered.
+    for localized_tz in tz._tzinfos.values():
+        if (localized_tz._utcoffset == utcoffset
+            and localized_tz._dst == dstoffset):
+            return localized_tz
+
+    # This (utcoffset, dstoffset) information has been removed from the
+    # zone. Add it back. This might occur when the database maintainers have
+    # corrected incorrect information. datetime instances using this
+    # incorrect information will continue to do so, exactly as they were
+    # before being pickled. This is purely an overly paranoid safety net - I
+    # doubt this will ever be needed in real life.
+ inf = (utcoffset, dstoffset, tzname) + tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos) + return tz._tzinfos[inf] diff --git a/lib/pytz/zoneinfo/Africa/Abidjan b/lib/pytz/zoneinfo/Africa/Abidjan new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Abidjan differ diff --git a/lib/pytz/zoneinfo/Africa/Accra b/lib/pytz/zoneinfo/Africa/Accra new file mode 100644 index 00000000..6ff8fb6b Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Accra differ diff --git a/lib/pytz/zoneinfo/Africa/Addis_Ababa b/lib/pytz/zoneinfo/Africa/Addis_Ababa new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Addis_Ababa differ diff --git a/lib/pytz/zoneinfo/Africa/Algiers b/lib/pytz/zoneinfo/Africa/Algiers new file mode 100644 index 00000000..2a25f3ac Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Algiers differ diff --git a/lib/pytz/zoneinfo/Africa/Asmara b/lib/pytz/zoneinfo/Africa/Asmara new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Asmara differ diff --git a/lib/pytz/zoneinfo/Africa/Asmera b/lib/pytz/zoneinfo/Africa/Asmera new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Asmera differ diff --git a/lib/pytz/zoneinfo/Africa/Bamako b/lib/pytz/zoneinfo/Africa/Bamako new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Bamako differ diff --git a/lib/pytz/zoneinfo/Africa/Bangui b/lib/pytz/zoneinfo/Africa/Bangui new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Bangui differ diff --git a/lib/pytz/zoneinfo/Africa/Banjul b/lib/pytz/zoneinfo/Africa/Banjul new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Banjul differ diff --git a/lib/pytz/zoneinfo/Africa/Bissau b/lib/pytz/zoneinfo/Africa/Bissau new file mode 100644 index 00000000..0696667c Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Bissau differ diff --git a/lib/pytz/zoneinfo/Africa/Blantyre b/lib/pytz/zoneinfo/Africa/Blantyre new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Blantyre differ diff --git a/lib/pytz/zoneinfo/Africa/Brazzaville b/lib/pytz/zoneinfo/Africa/Brazzaville new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Brazzaville differ diff --git a/lib/pytz/zoneinfo/Africa/Bujumbura b/lib/pytz/zoneinfo/Africa/Bujumbura new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Bujumbura differ diff --git a/lib/pytz/zoneinfo/Africa/Cairo b/lib/pytz/zoneinfo/Africa/Cairo new file mode 100644 index 00000000..0eeed113 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Cairo differ diff --git a/lib/pytz/zoneinfo/Africa/Casablanca b/lib/pytz/zoneinfo/Africa/Casablanca new file mode 100644 index 00000000..c001c375 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Casablanca differ diff --git a/lib/pytz/zoneinfo/Africa/Ceuta b/lib/pytz/zoneinfo/Africa/Ceuta new file mode 100644 index 00000000..6227e2bb Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Ceuta differ diff --git a/lib/pytz/zoneinfo/Africa/Conakry b/lib/pytz/zoneinfo/Africa/Conakry new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Conakry differ diff --git a/lib/pytz/zoneinfo/Africa/Dakar b/lib/pytz/zoneinfo/Africa/Dakar new file mode 100644 
index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Dakar differ diff --git a/lib/pytz/zoneinfo/Africa/Dar_es_Salaam b/lib/pytz/zoneinfo/Africa/Dar_es_Salaam new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Dar_es_Salaam differ diff --git a/lib/pytz/zoneinfo/Africa/Djibouti b/lib/pytz/zoneinfo/Africa/Djibouti new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Djibouti differ diff --git a/lib/pytz/zoneinfo/Africa/Douala b/lib/pytz/zoneinfo/Africa/Douala new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Douala differ diff --git a/lib/pytz/zoneinfo/Africa/El_Aaiun b/lib/pytz/zoneinfo/Africa/El_Aaiun new file mode 100644 index 00000000..805d39e4 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/El_Aaiun differ diff --git a/lib/pytz/zoneinfo/Africa/Freetown b/lib/pytz/zoneinfo/Africa/Freetown new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Freetown differ diff --git a/lib/pytz/zoneinfo/Africa/Gaborone b/lib/pytz/zoneinfo/Africa/Gaborone new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Gaborone differ diff --git a/lib/pytz/zoneinfo/Africa/Harare b/lib/pytz/zoneinfo/Africa/Harare new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Harare differ diff --git a/lib/pytz/zoneinfo/Africa/Johannesburg b/lib/pytz/zoneinfo/Africa/Johannesburg new file mode 100644 index 00000000..ddf3652e Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Johannesburg differ diff --git a/lib/pytz/zoneinfo/Africa/Juba b/lib/pytz/zoneinfo/Africa/Juba new file mode 100644 index 00000000..36291882 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Juba differ diff --git a/lib/pytz/zoneinfo/Africa/Kampala b/lib/pytz/zoneinfo/Africa/Kampala new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Kampala differ diff --git a/lib/pytz/zoneinfo/Africa/Khartoum b/lib/pytz/zoneinfo/Africa/Khartoum new file mode 100644 index 00000000..36291882 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Khartoum differ diff --git a/lib/pytz/zoneinfo/Africa/Kigali b/lib/pytz/zoneinfo/Africa/Kigali new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Kigali differ diff --git a/lib/pytz/zoneinfo/Africa/Kinshasa b/lib/pytz/zoneinfo/Africa/Kinshasa new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Kinshasa differ diff --git a/lib/pytz/zoneinfo/Africa/Lagos b/lib/pytz/zoneinfo/Africa/Lagos new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Lagos differ diff --git a/lib/pytz/zoneinfo/Africa/Libreville b/lib/pytz/zoneinfo/Africa/Libreville new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Libreville differ diff --git a/lib/pytz/zoneinfo/Africa/Lome b/lib/pytz/zoneinfo/Africa/Lome new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Lome differ diff --git a/lib/pytz/zoneinfo/Africa/Luanda b/lib/pytz/zoneinfo/Africa/Luanda new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Luanda differ diff --git a/lib/pytz/zoneinfo/Africa/Lubumbashi b/lib/pytz/zoneinfo/Africa/Lubumbashi new file mode 
100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Lubumbashi differ diff --git a/lib/pytz/zoneinfo/Africa/Lusaka b/lib/pytz/zoneinfo/Africa/Lusaka new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Lusaka differ diff --git a/lib/pytz/zoneinfo/Africa/Malabo b/lib/pytz/zoneinfo/Africa/Malabo new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Malabo differ diff --git a/lib/pytz/zoneinfo/Africa/Maputo b/lib/pytz/zoneinfo/Africa/Maputo new file mode 100644 index 00000000..5b871dba Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Maputo differ diff --git a/lib/pytz/zoneinfo/Africa/Maseru b/lib/pytz/zoneinfo/Africa/Maseru new file mode 100644 index 00000000..ddf3652e Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Maseru differ diff --git a/lib/pytz/zoneinfo/Africa/Mbabane b/lib/pytz/zoneinfo/Africa/Mbabane new file mode 100644 index 00000000..ddf3652e Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Mbabane differ diff --git a/lib/pytz/zoneinfo/Africa/Mogadishu b/lib/pytz/zoneinfo/Africa/Mogadishu new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Mogadishu differ diff --git a/lib/pytz/zoneinfo/Africa/Monrovia b/lib/pytz/zoneinfo/Africa/Monrovia new file mode 100644 index 00000000..0f2294ea Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Monrovia differ diff --git a/lib/pytz/zoneinfo/Africa/Nairobi b/lib/pytz/zoneinfo/Africa/Nairobi new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Nairobi differ diff --git a/lib/pytz/zoneinfo/Africa/Ndjamena b/lib/pytz/zoneinfo/Africa/Ndjamena new file mode 100644 index 00000000..bbfe19d6 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Ndjamena differ diff --git a/lib/pytz/zoneinfo/Africa/Niamey b/lib/pytz/zoneinfo/Africa/Niamey new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Niamey differ diff --git a/lib/pytz/zoneinfo/Africa/Nouakchott b/lib/pytz/zoneinfo/Africa/Nouakchott new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Nouakchott differ diff --git a/lib/pytz/zoneinfo/Africa/Ouagadougou b/lib/pytz/zoneinfo/Africa/Ouagadougou new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Ouagadougou differ diff --git a/lib/pytz/zoneinfo/Africa/Porto-Novo b/lib/pytz/zoneinfo/Africa/Porto-Novo new file mode 100644 index 00000000..b1c97cc5 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Porto-Novo differ diff --git a/lib/pytz/zoneinfo/Africa/Sao_Tome b/lib/pytz/zoneinfo/Africa/Sao_Tome new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Sao_Tome differ diff --git a/lib/pytz/zoneinfo/Africa/Timbuktu b/lib/pytz/zoneinfo/Africa/Timbuktu new file mode 100644 index 00000000..6fd1af32 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Timbuktu differ diff --git a/lib/pytz/zoneinfo/Africa/Tripoli b/lib/pytz/zoneinfo/Africa/Tripoli new file mode 100644 index 00000000..b32e2202 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Tripoli differ diff --git a/lib/pytz/zoneinfo/Africa/Tunis b/lib/pytz/zoneinfo/Africa/Tunis new file mode 100644 index 00000000..4bd3885a Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Tunis differ diff --git a/lib/pytz/zoneinfo/Africa/Windhoek b/lib/pytz/zoneinfo/Africa/Windhoek new file 
mode 100644 index 00000000..33bdfdf2 Binary files /dev/null and b/lib/pytz/zoneinfo/Africa/Windhoek differ diff --git a/lib/pytz/zoneinfo/America/Adak b/lib/pytz/zoneinfo/America/Adak new file mode 100644 index 00000000..b0a5dd60 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Adak differ diff --git a/lib/pytz/zoneinfo/America/Anchorage b/lib/pytz/zoneinfo/America/Anchorage new file mode 100644 index 00000000..a4627cac Binary files /dev/null and b/lib/pytz/zoneinfo/America/Anchorage differ diff --git a/lib/pytz/zoneinfo/America/Anguilla b/lib/pytz/zoneinfo/America/Anguilla new file mode 100644 index 00000000..447efbe2 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Anguilla differ diff --git a/lib/pytz/zoneinfo/America/Antigua b/lib/pytz/zoneinfo/America/Antigua new file mode 100644 index 00000000..66ab1474 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Antigua differ diff --git a/lib/pytz/zoneinfo/America/Araguaina b/lib/pytz/zoneinfo/America/Araguaina new file mode 100644 index 00000000..507ea469 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Araguaina differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Buenos_Aires b/lib/pytz/zoneinfo/America/Argentina/Buenos_Aires new file mode 100644 index 00000000..a1fae8c8 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Buenos_Aires differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Catamarca b/lib/pytz/zoneinfo/America/Argentina/Catamarca new file mode 100644 index 00000000..7cbc9f4b Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Catamarca differ diff --git a/lib/pytz/zoneinfo/America/Argentina/ComodRivadavia b/lib/pytz/zoneinfo/America/Argentina/ComodRivadavia new file mode 100644 index 00000000..7cbc9f4b Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/ComodRivadavia differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Cordoba b/lib/pytz/zoneinfo/America/Argentina/Cordoba new file mode 100644 index 00000000..cd97a24b Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Cordoba differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Jujuy b/lib/pytz/zoneinfo/America/Argentina/Jujuy new file mode 100644 index 00000000..7be3eeb6 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Jujuy differ diff --git a/lib/pytz/zoneinfo/America/Argentina/La_Rioja b/lib/pytz/zoneinfo/America/Argentina/La_Rioja new file mode 100644 index 00000000..1296ed44 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/La_Rioja differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Mendoza b/lib/pytz/zoneinfo/America/Argentina/Mendoza new file mode 100644 index 00000000..f9eb526c Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Mendoza differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Rio_Gallegos b/lib/pytz/zoneinfo/America/Argentina/Rio_Gallegos new file mode 100644 index 00000000..8fd203d1 Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Rio_Gallegos differ diff --git a/lib/pytz/zoneinfo/America/Argentina/Salta b/lib/pytz/zoneinfo/America/Argentina/Salta new file mode 100644 index 00000000..5778059f Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Salta differ diff --git a/lib/pytz/zoneinfo/America/Argentina/San_Juan b/lib/pytz/zoneinfo/America/Argentina/San_Juan new file mode 100644 index 00000000..8670279e Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/San_Juan differ diff --git a/lib/pytz/zoneinfo/America/Argentina/San_Luis b/lib/pytz/zoneinfo/America/Argentina/San_Luis 
new file mode 100644
index 00000000..51eb1d84
Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/San_Luis differ
diff --git a/lib/pytz/zoneinfo/America/Argentina/Tucuman b/lib/pytz/zoneinfo/America/Argentina/Tucuman
new file mode 100644
index 00000000..694093e7
Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Tucuman differ
diff --git a/lib/pytz/zoneinfo/America/Argentina/Ushuaia b/lib/pytz/zoneinfo/America/Argentina/Ushuaia
new file mode 100644
index 00000000..dc42621d
Binary files /dev/null and b/lib/pytz/zoneinfo/America/Argentina/Ushuaia differ
[... several hundred further binary additions of the same form ("diff --git a/<path> b/<path>", "new file mode 100644", "index 00000000..<hash>", "Binary files /dev/null and b/<path> differ"), covering the remaining lib/pytz/zoneinfo tz database files from America/Aruba through Europe/Madrid: America/* (including Indiana/*, Kentucky/*, North_Dakota/*), Antarctica/*, Arctic/*, Asia/*, Atlantic/*, Australia/*, Brazil/*, Canada/*, Chile/*, Etc/* (GMT offsets, UCT, UTC, Universal, Zulu), Europe/*, plus the top-level CET, CST6CDT, Cuba, EET, EST, EST5EDT, Egypt and Eire zones ...]
diff --git a/lib/pytz/zoneinfo/Europe/Malta b/lib/pytz/zoneinfo/Europe/Malta
new file mode 100644
index 00000000..d2519389
Binary
files /dev/null and b/lib/pytz/zoneinfo/Europe/Malta differ diff --git a/lib/pytz/zoneinfo/Europe/Mariehamn b/lib/pytz/zoneinfo/Europe/Mariehamn new file mode 100644 index 00000000..29b3c817 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Mariehamn differ diff --git a/lib/pytz/zoneinfo/Europe/Minsk b/lib/pytz/zoneinfo/Europe/Minsk new file mode 100644 index 00000000..28ef30a3 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Minsk differ diff --git a/lib/pytz/zoneinfo/Europe/Monaco b/lib/pytz/zoneinfo/Europe/Monaco new file mode 100644 index 00000000..0b40f1ec Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Monaco differ diff --git a/lib/pytz/zoneinfo/Europe/Moscow b/lib/pytz/zoneinfo/Europe/Moscow new file mode 100644 index 00000000..bdbbaebe Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Moscow differ diff --git a/lib/pytz/zoneinfo/Europe/Nicosia b/lib/pytz/zoneinfo/Europe/Nicosia new file mode 100644 index 00000000..3e663b21 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Nicosia differ diff --git a/lib/pytz/zoneinfo/Europe/Oslo b/lib/pytz/zoneinfo/Europe/Oslo new file mode 100644 index 00000000..239c0174 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Oslo differ diff --git a/lib/pytz/zoneinfo/Europe/Paris b/lib/pytz/zoneinfo/Europe/Paris new file mode 100644 index 00000000..cf6e2e2e Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Paris differ diff --git a/lib/pytz/zoneinfo/Europe/Podgorica b/lib/pytz/zoneinfo/Europe/Podgorica new file mode 100644 index 00000000..79c25d70 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Podgorica differ diff --git a/lib/pytz/zoneinfo/Europe/Prague b/lib/pytz/zoneinfo/Europe/Prague new file mode 100644 index 00000000..4eabe5c8 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Prague differ diff --git a/lib/pytz/zoneinfo/Europe/Riga b/lib/pytz/zoneinfo/Europe/Riga new file mode 100644 index 00000000..b729ee8c Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Riga differ diff --git a/lib/pytz/zoneinfo/Europe/Rome b/lib/pytz/zoneinfo/Europe/Rome new file mode 100644 index 00000000..5cc30403 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Rome differ diff --git a/lib/pytz/zoneinfo/Europe/Samara b/lib/pytz/zoneinfo/Europe/Samara new file mode 100644 index 00000000..79759f53 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Samara differ diff --git a/lib/pytz/zoneinfo/Europe/San_Marino b/lib/pytz/zoneinfo/Europe/San_Marino new file mode 100644 index 00000000..5cc30403 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/San_Marino differ diff --git a/lib/pytz/zoneinfo/Europe/Sarajevo b/lib/pytz/zoneinfo/Europe/Sarajevo new file mode 100644 index 00000000..79c25d70 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Sarajevo differ diff --git a/lib/pytz/zoneinfo/Europe/Simferopol b/lib/pytz/zoneinfo/Europe/Simferopol new file mode 100644 index 00000000..ebe9017d Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Simferopol differ diff --git a/lib/pytz/zoneinfo/Europe/Skopje b/lib/pytz/zoneinfo/Europe/Skopje new file mode 100644 index 00000000..79c25d70 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Skopje differ diff --git a/lib/pytz/zoneinfo/Europe/Sofia b/lib/pytz/zoneinfo/Europe/Sofia new file mode 100644 index 00000000..763e0747 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Sofia differ diff --git a/lib/pytz/zoneinfo/Europe/Stockholm b/lib/pytz/zoneinfo/Europe/Stockholm new file mode 100644 index 00000000..43c7f2e2 Binary files /dev/null and 
b/lib/pytz/zoneinfo/Europe/Stockholm differ diff --git a/lib/pytz/zoneinfo/Europe/Tallinn b/lib/pytz/zoneinfo/Europe/Tallinn new file mode 100644 index 00000000..8a4f1240 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Tallinn differ diff --git a/lib/pytz/zoneinfo/Europe/Tirane b/lib/pytz/zoneinfo/Europe/Tirane new file mode 100644 index 00000000..52c16a42 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Tirane differ diff --git a/lib/pytz/zoneinfo/Europe/Tiraspol b/lib/pytz/zoneinfo/Europe/Tiraspol new file mode 100644 index 00000000..7998b2d8 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Tiraspol differ diff --git a/lib/pytz/zoneinfo/Europe/Uzhgorod b/lib/pytz/zoneinfo/Europe/Uzhgorod new file mode 100644 index 00000000..8ddba909 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Uzhgorod differ diff --git a/lib/pytz/zoneinfo/Europe/Vaduz b/lib/pytz/zoneinfo/Europe/Vaduz new file mode 100644 index 00000000..9c2b600b Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Vaduz differ diff --git a/lib/pytz/zoneinfo/Europe/Vatican b/lib/pytz/zoneinfo/Europe/Vatican new file mode 100644 index 00000000..5cc30403 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Vatican differ diff --git a/lib/pytz/zoneinfo/Europe/Vienna b/lib/pytz/zoneinfo/Europe/Vienna new file mode 100644 index 00000000..9c0fac53 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Vienna differ diff --git a/lib/pytz/zoneinfo/Europe/Vilnius b/lib/pytz/zoneinfo/Europe/Vilnius new file mode 100644 index 00000000..3b11880d Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Vilnius differ diff --git a/lib/pytz/zoneinfo/Europe/Volgograd b/lib/pytz/zoneinfo/Europe/Volgograd new file mode 100644 index 00000000..c62c32a6 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Volgograd differ diff --git a/lib/pytz/zoneinfo/Europe/Warsaw b/lib/pytz/zoneinfo/Europe/Warsaw new file mode 100644 index 00000000..5cbba412 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Warsaw differ diff --git a/lib/pytz/zoneinfo/Europe/Zagreb b/lib/pytz/zoneinfo/Europe/Zagreb new file mode 100644 index 00000000..79c25d70 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Zagreb differ diff --git a/lib/pytz/zoneinfo/Europe/Zaporozhye b/lib/pytz/zoneinfo/Europe/Zaporozhye new file mode 100644 index 00000000..49b568e7 Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Zaporozhye differ diff --git a/lib/pytz/zoneinfo/Europe/Zurich b/lib/pytz/zoneinfo/Europe/Zurich new file mode 100644 index 00000000..9c2b600b Binary files /dev/null and b/lib/pytz/zoneinfo/Europe/Zurich differ diff --git a/lib/pytz/zoneinfo/Factory b/lib/pytz/zoneinfo/Factory new file mode 100644 index 00000000..6e6c452e Binary files /dev/null and b/lib/pytz/zoneinfo/Factory differ diff --git a/lib/pytz/zoneinfo/GB b/lib/pytz/zoneinfo/GB new file mode 100644 index 00000000..4527515c Binary files /dev/null and b/lib/pytz/zoneinfo/GB differ diff --git a/lib/pytz/zoneinfo/GB-Eire b/lib/pytz/zoneinfo/GB-Eire new file mode 100644 index 00000000..4527515c Binary files /dev/null and b/lib/pytz/zoneinfo/GB-Eire differ diff --git a/lib/pytz/zoneinfo/GMT b/lib/pytz/zoneinfo/GMT new file mode 100644 index 00000000..c05e45fd Binary files /dev/null and b/lib/pytz/zoneinfo/GMT differ diff --git a/lib/pytz/zoneinfo/GMT+0 b/lib/pytz/zoneinfo/GMT+0 new file mode 100644 index 00000000..c05e45fd Binary files /dev/null and b/lib/pytz/zoneinfo/GMT+0 differ diff --git a/lib/pytz/zoneinfo/GMT-0 b/lib/pytz/zoneinfo/GMT-0 new file mode 100644 index 00000000..c05e45fd Binary 
files /dev/null and b/lib/pytz/zoneinfo/GMT-0 differ diff --git a/lib/pytz/zoneinfo/GMT0 b/lib/pytz/zoneinfo/GMT0 new file mode 100644 index 00000000..c05e45fd Binary files /dev/null and b/lib/pytz/zoneinfo/GMT0 differ diff --git a/lib/pytz/zoneinfo/Greenwich b/lib/pytz/zoneinfo/Greenwich new file mode 100644 index 00000000..c05e45fd Binary files /dev/null and b/lib/pytz/zoneinfo/Greenwich differ diff --git a/lib/pytz/zoneinfo/HST b/lib/pytz/zoneinfo/HST new file mode 100644 index 00000000..03e4db07 Binary files /dev/null and b/lib/pytz/zoneinfo/HST differ diff --git a/lib/pytz/zoneinfo/Hongkong b/lib/pytz/zoneinfo/Hongkong new file mode 100644 index 00000000..dc9058e4 Binary files /dev/null and b/lib/pytz/zoneinfo/Hongkong differ diff --git a/lib/pytz/zoneinfo/Iceland b/lib/pytz/zoneinfo/Iceland new file mode 100644 index 00000000..35ba7a15 Binary files /dev/null and b/lib/pytz/zoneinfo/Iceland differ diff --git a/lib/pytz/zoneinfo/Indian/Antananarivo b/lib/pytz/zoneinfo/Indian/Antananarivo new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Antananarivo differ diff --git a/lib/pytz/zoneinfo/Indian/Chagos b/lib/pytz/zoneinfo/Indian/Chagos new file mode 100644 index 00000000..a616bdfb Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Chagos differ diff --git a/lib/pytz/zoneinfo/Indian/Christmas b/lib/pytz/zoneinfo/Indian/Christmas new file mode 100644 index 00000000..ebcd2624 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Christmas differ diff --git a/lib/pytz/zoneinfo/Indian/Cocos b/lib/pytz/zoneinfo/Indian/Cocos new file mode 100644 index 00000000..cd603f24 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Cocos differ diff --git a/lib/pytz/zoneinfo/Indian/Comoro b/lib/pytz/zoneinfo/Indian/Comoro new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Comoro differ diff --git a/lib/pytz/zoneinfo/Indian/Kerguelen b/lib/pytz/zoneinfo/Indian/Kerguelen new file mode 100644 index 00000000..462851eb Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Kerguelen differ diff --git a/lib/pytz/zoneinfo/Indian/Mahe b/lib/pytz/zoneinfo/Indian/Mahe new file mode 100644 index 00000000..5f42819b Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Mahe differ diff --git a/lib/pytz/zoneinfo/Indian/Maldives b/lib/pytz/zoneinfo/Indian/Maldives new file mode 100644 index 00000000..cec224ff Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Maldives differ diff --git a/lib/pytz/zoneinfo/Indian/Mauritius b/lib/pytz/zoneinfo/Indian/Mauritius new file mode 100644 index 00000000..66ecc8f5 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Mauritius differ diff --git a/lib/pytz/zoneinfo/Indian/Mayotte b/lib/pytz/zoneinfo/Indian/Mayotte new file mode 100644 index 00000000..750d3dc1 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Mayotte differ diff --git a/lib/pytz/zoneinfo/Indian/Reunion b/lib/pytz/zoneinfo/Indian/Reunion new file mode 100644 index 00000000..c4d0da90 Binary files /dev/null and b/lib/pytz/zoneinfo/Indian/Reunion differ diff --git a/lib/pytz/zoneinfo/Iran b/lib/pytz/zoneinfo/Iran new file mode 100644 index 00000000..87107811 Binary files /dev/null and b/lib/pytz/zoneinfo/Iran differ diff --git a/lib/pytz/zoneinfo/Israel b/lib/pytz/zoneinfo/Israel new file mode 100644 index 00000000..df511993 Binary files /dev/null and b/lib/pytz/zoneinfo/Israel differ diff --git a/lib/pytz/zoneinfo/Jamaica b/lib/pytz/zoneinfo/Jamaica new file mode 100644 index 00000000..006689bc Binary files 
/dev/null and b/lib/pytz/zoneinfo/Jamaica differ diff --git a/lib/pytz/zoneinfo/Japan b/lib/pytz/zoneinfo/Japan new file mode 100644 index 00000000..02441403 Binary files /dev/null and b/lib/pytz/zoneinfo/Japan differ diff --git a/lib/pytz/zoneinfo/Kwajalein b/lib/pytz/zoneinfo/Kwajalein new file mode 100644 index 00000000..094c3cfd Binary files /dev/null and b/lib/pytz/zoneinfo/Kwajalein differ diff --git a/lib/pytz/zoneinfo/Libya b/lib/pytz/zoneinfo/Libya new file mode 100644 index 00000000..b32e2202 Binary files /dev/null and b/lib/pytz/zoneinfo/Libya differ diff --git a/lib/pytz/zoneinfo/MET b/lib/pytz/zoneinfo/MET new file mode 100644 index 00000000..71963d53 Binary files /dev/null and b/lib/pytz/zoneinfo/MET differ diff --git a/lib/pytz/zoneinfo/MST b/lib/pytz/zoneinfo/MST new file mode 100644 index 00000000..a1bee7c6 Binary files /dev/null and b/lib/pytz/zoneinfo/MST differ diff --git a/lib/pytz/zoneinfo/MST7MDT b/lib/pytz/zoneinfo/MST7MDT new file mode 100644 index 00000000..726a7e57 Binary files /dev/null and b/lib/pytz/zoneinfo/MST7MDT differ diff --git a/lib/pytz/zoneinfo/Mexico/BajaNorte b/lib/pytz/zoneinfo/Mexico/BajaNorte new file mode 100644 index 00000000..13874753 Binary files /dev/null and b/lib/pytz/zoneinfo/Mexico/BajaNorte differ diff --git a/lib/pytz/zoneinfo/Mexico/BajaSur b/lib/pytz/zoneinfo/Mexico/BajaSur new file mode 100644 index 00000000..afa94c2a Binary files /dev/null and b/lib/pytz/zoneinfo/Mexico/BajaSur differ diff --git a/lib/pytz/zoneinfo/Mexico/General b/lib/pytz/zoneinfo/Mexico/General new file mode 100644 index 00000000..f11e3d2d Binary files /dev/null and b/lib/pytz/zoneinfo/Mexico/General differ diff --git a/lib/pytz/zoneinfo/NZ b/lib/pytz/zoneinfo/NZ new file mode 100644 index 00000000..a5f5b6d5 Binary files /dev/null and b/lib/pytz/zoneinfo/NZ differ diff --git a/lib/pytz/zoneinfo/NZ-CHAT b/lib/pytz/zoneinfo/NZ-CHAT new file mode 100644 index 00000000..59bc4ede Binary files /dev/null and b/lib/pytz/zoneinfo/NZ-CHAT differ diff --git a/lib/pytz/zoneinfo/Navajo b/lib/pytz/zoneinfo/Navajo new file mode 100644 index 00000000..7fc66917 Binary files /dev/null and b/lib/pytz/zoneinfo/Navajo differ diff --git a/lib/pytz/zoneinfo/PRC b/lib/pytz/zoneinfo/PRC new file mode 100644 index 00000000..dbd132f2 Binary files /dev/null and b/lib/pytz/zoneinfo/PRC differ diff --git a/lib/pytz/zoneinfo/PST8PDT b/lib/pytz/zoneinfo/PST8PDT new file mode 100644 index 00000000..6242ac04 Binary files /dev/null and b/lib/pytz/zoneinfo/PST8PDT differ diff --git a/lib/pytz/zoneinfo/Pacific/Apia b/lib/pytz/zoneinfo/Pacific/Apia new file mode 100644 index 00000000..cc5d2cd2 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Apia differ diff --git a/lib/pytz/zoneinfo/Pacific/Auckland b/lib/pytz/zoneinfo/Pacific/Auckland new file mode 100644 index 00000000..a5f5b6d5 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Auckland differ diff --git a/lib/pytz/zoneinfo/Pacific/Bougainville b/lib/pytz/zoneinfo/Pacific/Bougainville new file mode 100644 index 00000000..219c78cd Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Bougainville differ diff --git a/lib/pytz/zoneinfo/Pacific/Chatham b/lib/pytz/zoneinfo/Pacific/Chatham new file mode 100644 index 00000000..59bc4ede Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Chatham differ diff --git a/lib/pytz/zoneinfo/Pacific/Chuuk b/lib/pytz/zoneinfo/Pacific/Chuuk new file mode 100644 index 00000000..28356bbf Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Chuuk differ diff --git a/lib/pytz/zoneinfo/Pacific/Easter 
b/lib/pytz/zoneinfo/Pacific/Easter new file mode 100644 index 00000000..8c8a6c7d Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Easter differ diff --git a/lib/pytz/zoneinfo/Pacific/Efate b/lib/pytz/zoneinfo/Pacific/Efate new file mode 100644 index 00000000..1d99519b Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Efate differ diff --git a/lib/pytz/zoneinfo/Pacific/Enderbury b/lib/pytz/zoneinfo/Pacific/Enderbury new file mode 100644 index 00000000..48610523 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Enderbury differ diff --git a/lib/pytz/zoneinfo/Pacific/Fakaofo b/lib/pytz/zoneinfo/Pacific/Fakaofo new file mode 100644 index 00000000..e02e18e2 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Fakaofo differ diff --git a/lib/pytz/zoneinfo/Pacific/Fiji b/lib/pytz/zoneinfo/Pacific/Fiji new file mode 100644 index 00000000..b75f194e Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Fiji differ diff --git a/lib/pytz/zoneinfo/Pacific/Funafuti b/lib/pytz/zoneinfo/Pacific/Funafuti new file mode 100644 index 00000000..576dea30 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Funafuti differ diff --git a/lib/pytz/zoneinfo/Pacific/Galapagos b/lib/pytz/zoneinfo/Pacific/Galapagos new file mode 100644 index 00000000..c9a7371d Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Galapagos differ diff --git a/lib/pytz/zoneinfo/Pacific/Gambier b/lib/pytz/zoneinfo/Pacific/Gambier new file mode 100644 index 00000000..4ab6c206 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Gambier differ diff --git a/lib/pytz/zoneinfo/Pacific/Guadalcanal b/lib/pytz/zoneinfo/Pacific/Guadalcanal new file mode 100644 index 00000000..b183d1ea Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Guadalcanal differ diff --git a/lib/pytz/zoneinfo/Pacific/Guam b/lib/pytz/zoneinfo/Pacific/Guam new file mode 100644 index 00000000..4286e6ba Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Guam differ diff --git a/lib/pytz/zoneinfo/Pacific/Honolulu b/lib/pytz/zoneinfo/Pacific/Honolulu new file mode 100644 index 00000000..bd855772 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Honolulu differ diff --git a/lib/pytz/zoneinfo/Pacific/Johnston b/lib/pytz/zoneinfo/Pacific/Johnston new file mode 100644 index 00000000..bd855772 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Johnston differ diff --git a/lib/pytz/zoneinfo/Pacific/Kiritimati b/lib/pytz/zoneinfo/Pacific/Kiritimati new file mode 100644 index 00000000..c2eafbc7 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Kiritimati differ diff --git a/lib/pytz/zoneinfo/Pacific/Kosrae b/lib/pytz/zoneinfo/Pacific/Kosrae new file mode 100644 index 00000000..66c4d658 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Kosrae differ diff --git a/lib/pytz/zoneinfo/Pacific/Kwajalein b/lib/pytz/zoneinfo/Pacific/Kwajalein new file mode 100644 index 00000000..094c3cfd Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Kwajalein differ diff --git a/lib/pytz/zoneinfo/Pacific/Majuro b/lib/pytz/zoneinfo/Pacific/Majuro new file mode 100644 index 00000000..d53b7c2d Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Majuro differ diff --git a/lib/pytz/zoneinfo/Pacific/Marquesas b/lib/pytz/zoneinfo/Pacific/Marquesas new file mode 100644 index 00000000..c717c122 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Marquesas differ diff --git a/lib/pytz/zoneinfo/Pacific/Midway b/lib/pytz/zoneinfo/Pacific/Midway new file mode 100644 index 00000000..f2a2f63c Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Midway differ 
diff --git a/lib/pytz/zoneinfo/Pacific/Nauru b/lib/pytz/zoneinfo/Pacific/Nauru new file mode 100644 index 00000000..896ffeee Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Nauru differ diff --git a/lib/pytz/zoneinfo/Pacific/Niue b/lib/pytz/zoneinfo/Pacific/Niue new file mode 100644 index 00000000..d772edf5 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Niue differ diff --git a/lib/pytz/zoneinfo/Pacific/Norfolk b/lib/pytz/zoneinfo/Pacific/Norfolk new file mode 100644 index 00000000..3a286be3 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Norfolk differ diff --git a/lib/pytz/zoneinfo/Pacific/Noumea b/lib/pytz/zoneinfo/Pacific/Noumea new file mode 100644 index 00000000..fcc44e60 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Noumea differ diff --git a/lib/pytz/zoneinfo/Pacific/Pago_Pago b/lib/pytz/zoneinfo/Pacific/Pago_Pago new file mode 100644 index 00000000..1d7649ff Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Pago_Pago differ diff --git a/lib/pytz/zoneinfo/Pacific/Palau b/lib/pytz/zoneinfo/Pacific/Palau new file mode 100644 index 00000000..28992d2d Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Palau differ diff --git a/lib/pytz/zoneinfo/Pacific/Pitcairn b/lib/pytz/zoneinfo/Pacific/Pitcairn new file mode 100644 index 00000000..d62c648b Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Pitcairn differ diff --git a/lib/pytz/zoneinfo/Pacific/Pohnpei b/lib/pytz/zoneinfo/Pacific/Pohnpei new file mode 100644 index 00000000..59bd7646 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Pohnpei differ diff --git a/lib/pytz/zoneinfo/Pacific/Ponape b/lib/pytz/zoneinfo/Pacific/Ponape new file mode 100644 index 00000000..59bd7646 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Ponape differ diff --git a/lib/pytz/zoneinfo/Pacific/Port_Moresby b/lib/pytz/zoneinfo/Pacific/Port_Moresby new file mode 100644 index 00000000..dffa4573 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Port_Moresby differ diff --git a/lib/pytz/zoneinfo/Pacific/Rarotonga b/lib/pytz/zoneinfo/Pacific/Rarotonga new file mode 100644 index 00000000..2a254902 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Rarotonga differ diff --git a/lib/pytz/zoneinfo/Pacific/Saipan b/lib/pytz/zoneinfo/Pacific/Saipan new file mode 100644 index 00000000..c54473cd Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Saipan differ diff --git a/lib/pytz/zoneinfo/Pacific/Samoa b/lib/pytz/zoneinfo/Pacific/Samoa new file mode 100644 index 00000000..1d7649ff Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Samoa differ diff --git a/lib/pytz/zoneinfo/Pacific/Tahiti b/lib/pytz/zoneinfo/Pacific/Tahiti new file mode 100644 index 00000000..bfc9a7c9 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Tahiti differ diff --git a/lib/pytz/zoneinfo/Pacific/Tarawa b/lib/pytz/zoneinfo/Pacific/Tarawa new file mode 100644 index 00000000..1e8189ce Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Tarawa differ diff --git a/lib/pytz/zoneinfo/Pacific/Tongatapu b/lib/pytz/zoneinfo/Pacific/Tongatapu new file mode 100644 index 00000000..71d899bb Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Tongatapu differ diff --git a/lib/pytz/zoneinfo/Pacific/Truk b/lib/pytz/zoneinfo/Pacific/Truk new file mode 100644 index 00000000..28356bbf Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Truk differ diff --git a/lib/pytz/zoneinfo/Pacific/Wake b/lib/pytz/zoneinfo/Pacific/Wake new file mode 100644 index 00000000..9e2a37cc Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Wake 
differ diff --git a/lib/pytz/zoneinfo/Pacific/Wallis b/lib/pytz/zoneinfo/Pacific/Wallis new file mode 100644 index 00000000..b8944715 Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Wallis differ diff --git a/lib/pytz/zoneinfo/Pacific/Yap b/lib/pytz/zoneinfo/Pacific/Yap new file mode 100644 index 00000000..28356bbf Binary files /dev/null and b/lib/pytz/zoneinfo/Pacific/Yap differ diff --git a/lib/pytz/zoneinfo/Poland b/lib/pytz/zoneinfo/Poland new file mode 100644 index 00000000..5cbba412 Binary files /dev/null and b/lib/pytz/zoneinfo/Poland differ diff --git a/lib/pytz/zoneinfo/Portugal b/lib/pytz/zoneinfo/Portugal new file mode 100644 index 00000000..b9aff3a5 Binary files /dev/null and b/lib/pytz/zoneinfo/Portugal differ diff --git a/lib/pytz/zoneinfo/ROC b/lib/pytz/zoneinfo/ROC new file mode 100644 index 00000000..4810a0b6 Binary files /dev/null and b/lib/pytz/zoneinfo/ROC differ diff --git a/lib/pytz/zoneinfo/ROK b/lib/pytz/zoneinfo/ROK new file mode 100644 index 00000000..fd91d5b7 Binary files /dev/null and b/lib/pytz/zoneinfo/ROK differ diff --git a/lib/pytz/zoneinfo/Singapore b/lib/pytz/zoneinfo/Singapore new file mode 100644 index 00000000..9dd49cb7 Binary files /dev/null and b/lib/pytz/zoneinfo/Singapore differ diff --git a/lib/pytz/zoneinfo/Turkey b/lib/pytz/zoneinfo/Turkey new file mode 100644 index 00000000..d89aa3a8 Binary files /dev/null and b/lib/pytz/zoneinfo/Turkey differ diff --git a/lib/pytz/zoneinfo/UCT b/lib/pytz/zoneinfo/UCT new file mode 100644 index 00000000..40147b9e Binary files /dev/null and b/lib/pytz/zoneinfo/UCT differ diff --git a/lib/pytz/zoneinfo/US/Alaska b/lib/pytz/zoneinfo/US/Alaska new file mode 100644 index 00000000..a4627cac Binary files /dev/null and b/lib/pytz/zoneinfo/US/Alaska differ diff --git a/lib/pytz/zoneinfo/US/Aleutian b/lib/pytz/zoneinfo/US/Aleutian new file mode 100644 index 00000000..b0a5dd60 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Aleutian differ diff --git a/lib/pytz/zoneinfo/US/Arizona b/lib/pytz/zoneinfo/US/Arizona new file mode 100644 index 00000000..adf28236 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Arizona differ diff --git a/lib/pytz/zoneinfo/US/Central b/lib/pytz/zoneinfo/US/Central new file mode 100644 index 00000000..3dd8f0fa Binary files /dev/null and b/lib/pytz/zoneinfo/US/Central differ diff --git a/lib/pytz/zoneinfo/US/East-Indiana b/lib/pytz/zoneinfo/US/East-Indiana new file mode 100644 index 00000000..4a92c065 Binary files /dev/null and b/lib/pytz/zoneinfo/US/East-Indiana differ diff --git a/lib/pytz/zoneinfo/US/Eastern b/lib/pytz/zoneinfo/US/Eastern new file mode 100644 index 00000000..7553fee3 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Eastern differ diff --git a/lib/pytz/zoneinfo/US/Hawaii b/lib/pytz/zoneinfo/US/Hawaii new file mode 100644 index 00000000..bd855772 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Hawaii differ diff --git a/lib/pytz/zoneinfo/US/Indiana-Starke b/lib/pytz/zoneinfo/US/Indiana-Starke new file mode 100644 index 00000000..cc785da9 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Indiana-Starke differ diff --git a/lib/pytz/zoneinfo/US/Michigan b/lib/pytz/zoneinfo/US/Michigan new file mode 100644 index 00000000..a123b331 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Michigan differ diff --git a/lib/pytz/zoneinfo/US/Mountain b/lib/pytz/zoneinfo/US/Mountain new file mode 100644 index 00000000..7fc66917 Binary files /dev/null and b/lib/pytz/zoneinfo/US/Mountain differ diff --git a/lib/pytz/zoneinfo/US/Pacific b/lib/pytz/zoneinfo/US/Pacific new file mode 100644 
index 00000000..1fa9149f Binary files /dev/null and b/lib/pytz/zoneinfo/US/Pacific differ diff --git a/lib/pytz/zoneinfo/US/Pacific-New b/lib/pytz/zoneinfo/US/Pacific-New new file mode 100644 index 00000000..1fa9149f Binary files /dev/null and b/lib/pytz/zoneinfo/US/Pacific-New differ diff --git a/lib/pytz/zoneinfo/US/Samoa b/lib/pytz/zoneinfo/US/Samoa new file mode 100644 index 00000000..1d7649ff Binary files /dev/null and b/lib/pytz/zoneinfo/US/Samoa differ diff --git a/lib/pytz/zoneinfo/UTC b/lib/pytz/zoneinfo/UTC new file mode 100644 index 00000000..c3b97f1a Binary files /dev/null and b/lib/pytz/zoneinfo/UTC differ diff --git a/lib/pytz/zoneinfo/Universal b/lib/pytz/zoneinfo/Universal new file mode 100644 index 00000000..c3b97f1a Binary files /dev/null and b/lib/pytz/zoneinfo/Universal differ diff --git a/lib/pytz/zoneinfo/W-SU b/lib/pytz/zoneinfo/W-SU new file mode 100644 index 00000000..bdbbaebe Binary files /dev/null and b/lib/pytz/zoneinfo/W-SU differ diff --git a/lib/pytz/zoneinfo/WET b/lib/pytz/zoneinfo/WET new file mode 100644 index 00000000..444a1933 Binary files /dev/null and b/lib/pytz/zoneinfo/WET differ diff --git a/lib/pytz/zoneinfo/Zulu b/lib/pytz/zoneinfo/Zulu new file mode 100644 index 00000000..c3b97f1a Binary files /dev/null and b/lib/pytz/zoneinfo/Zulu differ diff --git a/lib/pytz/zoneinfo/iso3166.tab b/lib/pytz/zoneinfo/iso3166.tab new file mode 100644 index 00000000..0b0b8426 --- /dev/null +++ b/lib/pytz/zoneinfo/iso3166.tab @@ -0,0 +1,275 @@ +# ISO 3166 alpha-2 country codes +# +# This file is in the public domain, so clarified as of +# 2009-05-17 by Arthur David Olson. +# +# From Paul Eggert (2014-07-18): +# This file contains a table of two-letter country codes. Columns are +# separated by a single tab. Lines beginning with '#' are comments. +# Although all text currently uses ASCII encoding, this is planned to +# change to UTF-8 soon. The columns of the table are as follows: +# +# 1. ISO 3166-1 alpha-2 country code, current as of +# ISO 3166-1 Newsletter VI-16 (2013-07-11). See: Updates on ISO 3166 +# http://www.iso.org/iso/home/standards/country_codes/updates_on_iso_3166.htm +# 2. The usual English name for the coded region, +# chosen so that alphabetic sorting of subsets produces helpful lists. +# This is not the same as the English name in the ISO 3166 tables. +# +# The table is sorted by country code. +# +# This table is intended as an aid for users, to help them select time +# zone data appropriate for their practical needs. It is not intended +# to take or endorse any position on legal or territorial claims. +# +#country- +#code name of country, territory, area, or subdivision +AD Andorra +AE United Arab Emirates +AF Afghanistan +AG Antigua & Barbuda +AI Anguilla +AL Albania +AM Armenia +AO Angola +AQ Antarctica +AR Argentina +AS Samoa (American) +AT Austria +AU Australia +AW Aruba +AX Aaland Islands +AZ Azerbaijan +BA Bosnia & Herzegovina +BB Barbados +BD Bangladesh +BE Belgium +BF Burkina Faso +BG Bulgaria +BH Bahrain +BI Burundi +BJ Benin +BL St Barthelemy +BM Bermuda +BN Brunei +BO Bolivia +BQ Caribbean Netherlands +BR Brazil +BS Bahamas +BT Bhutan +BV Bouvet Island +BW Botswana +BY Belarus +BZ Belize +CA Canada +CC Cocos (Keeling) Islands +CD Congo (Dem. Rep.) +CF Central African Rep. +CG Congo (Rep.) 
+CH Switzerland +CI Cote d'Ivoire +CK Cook Islands +CL Chile +CM Cameroon +CN China +CO Colombia +CR Costa Rica +CU Cuba +CV Cape Verde +CW Curacao +CX Christmas Island +CY Cyprus +CZ Czech Republic +DE Germany +DJ Djibouti +DK Denmark +DM Dominica +DO Dominican Republic +DZ Algeria +EC Ecuador +EE Estonia +EG Egypt +EH Western Sahara +ER Eritrea +ES Spain +ET Ethiopia +FI Finland +FJ Fiji +FK Falkland Islands +FM Micronesia +FO Faroe Islands +FR France +GA Gabon +GB Britain (UK) +GD Grenada +GE Georgia +GF French Guiana +GG Guernsey +GH Ghana +GI Gibraltar +GL Greenland +GM Gambia +GN Guinea +GP Guadeloupe +GQ Equatorial Guinea +GR Greece +GS South Georgia & the South Sandwich Islands +GT Guatemala +GU Guam +GW Guinea-Bissau +GY Guyana +HK Hong Kong +HM Heard Island & McDonald Islands +HN Honduras +HR Croatia +HT Haiti +HU Hungary +ID Indonesia +IE Ireland +IL Israel +IM Isle of Man +IN India +IO British Indian Ocean Territory +IQ Iraq +IR Iran +IS Iceland +IT Italy +JE Jersey +JM Jamaica +JO Jordan +JP Japan +KE Kenya +KG Kyrgyzstan +KH Cambodia +KI Kiribati +KM Comoros +KN St Kitts & Nevis +KP Korea (North) +KR Korea (South) +KW Kuwait +KY Cayman Islands +KZ Kazakhstan +LA Laos +LB Lebanon +LC St Lucia +LI Liechtenstein +LK Sri Lanka +LR Liberia +LS Lesotho +LT Lithuania +LU Luxembourg +LV Latvia +LY Libya +MA Morocco +MC Monaco +MD Moldova +ME Montenegro +MF St Martin (French part) +MG Madagascar +MH Marshall Islands +MK Macedonia +ML Mali +MM Myanmar (Burma) +MN Mongolia +MO Macau +MP Northern Mariana Islands +MQ Martinique +MR Mauritania +MS Montserrat +MT Malta +MU Mauritius +MV Maldives +MW Malawi +MX Mexico +MY Malaysia +MZ Mozambique +NA Namibia +NC New Caledonia +NE Niger +NF Norfolk Island +NG Nigeria +NI Nicaragua +NL Netherlands +NO Norway +NP Nepal +NR Nauru +NU Niue +NZ New Zealand +OM Oman +PA Panama +PE Peru +PF French Polynesia +PG Papua New Guinea +PH Philippines +PK Pakistan +PL Poland +PM St Pierre & Miquelon +PN Pitcairn +PR Puerto Rico +PS Palestine +PT Portugal +PW Palau +PY Paraguay +QA Qatar +RE Reunion +RO Romania +RS Serbia +RU Russia +RW Rwanda +SA Saudi Arabia +SB Solomon Islands +SC Seychelles +SD Sudan +SE Sweden +SG Singapore +SH St Helena +SI Slovenia +SJ Svalbard & Jan Mayen +SK Slovakia +SL Sierra Leone +SM San Marino +SN Senegal +SO Somalia +SR Suriname +SS South Sudan +ST Sao Tome & Principe +SV El Salvador +SX St Maarten (Dutch part) +SY Syria +SZ Swaziland +TC Turks & Caicos Is +TD Chad +TF French Southern & Antarctic Lands +TG Togo +TH Thailand +TJ Tajikistan +TK Tokelau +TL East Timor +TM Turkmenistan +TN Tunisia +TO Tonga +TR Turkey +TT Trinidad & Tobago +TV Tuvalu +TW Taiwan +TZ Tanzania +UA Ukraine +UG Uganda +UM US minor outlying islands +US United States +UY Uruguay +UZ Uzbekistan +VA Vatican City +VC St Vincent +VE Venezuela +VG Virgin Islands (UK) +VI Virgin Islands (US) +VN Vietnam +VU Vanuatu +WF Wallis & Futuna +WS Samoa (western) +YE Yemen +YT Mayotte +ZA South Africa +ZM Zambia +ZW Zimbabwe diff --git a/lib/pytz/zoneinfo/localtime b/lib/pytz/zoneinfo/localtime new file mode 100644 index 00000000..c05e45fd Binary files /dev/null and b/lib/pytz/zoneinfo/localtime differ diff --git a/lib/pytz/zoneinfo/posixrules b/lib/pytz/zoneinfo/posixrules new file mode 100644 index 00000000..7553fee3 Binary files /dev/null and b/lib/pytz/zoneinfo/posixrules differ diff --git a/lib/pytz/zoneinfo/zone.tab b/lib/pytz/zoneinfo/zone.tab new file mode 100644 index 00000000..a7373f17 --- /dev/null +++ b/lib/pytz/zoneinfo/zone.tab @@ -0,0 +1,440 @@ +# tz zone 
descriptions (deprecated version) +# +# This file is in the public domain, so clarified as of +# 2009-05-17 by Arthur David Olson. +# +# From Paul Eggert (2014-07-31): +# This file is intended as a backward-compatibility aid for older programs. +# New programs should use zone1970.tab. This file is like zone1970.tab (see +# zone1970.tab's comments), but with the following additional restrictions: +# +# 1. This file contains only ASCII characters. +# 2. The first data column contains exactly one country code. +# +# Because of (2), each row stands for an area that is the intersection +# of a region identified by a country code and of a zone where civil +# clocks have agreed since 1970; this is a narrower definition than +# that of zone1970.tab. +# +# This table is intended as an aid for users, to help them select time +# zone data entries appropriate for their practical needs. It is not +# intended to take or endorse any position on legal or territorial claims. +# +#country- +#code coordinates TZ comments +AD +4230+00131 Europe/Andorra +AE +2518+05518 Asia/Dubai +AF +3431+06912 Asia/Kabul +AG +1703-06148 America/Antigua +AI +1812-06304 America/Anguilla +AL +4120+01950 Europe/Tirane +AM +4011+04430 Asia/Yerevan +AO -0848+01314 Africa/Luanda +AQ -7750+16636 Antarctica/McMurdo McMurdo, South Pole, Scott (New Zealand time) +AQ -6734-06808 Antarctica/Rothera Rothera Station, Adelaide Island +AQ -6448-06406 Antarctica/Palmer Palmer Station, Anvers Island +AQ -6736+06253 Antarctica/Mawson Mawson Station, Holme Bay +AQ -6835+07758 Antarctica/Davis Davis Station, Vestfold Hills +AQ -6617+11031 Antarctica/Casey Casey Station, Bailey Peninsula +AQ -7824+10654 Antarctica/Vostok Vostok Station, Lake Vostok +AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville Station, Adelie Land +AQ -690022+0393524 Antarctica/Syowa Syowa Station, E Ongul I +AQ -720041+0023206 Antarctica/Troll Troll Station, Queen Maud Land +AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) +AR -3124-06411 America/Argentina/Cordoba most locations (CB, CC, CN, ER, FM, MN, SE, SF) +AR -2447-06525 America/Argentina/Salta (SA, LP, NQ, RN) +AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) +AR -2649-06513 America/Argentina/Tucuman Tucuman (TM) +AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH) +AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) +AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) +AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) +AR -3319-06621 America/Argentina/San_Luis San Luis (SL) +AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) +AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) +AS -1416-17042 Pacific/Pago_Pago +AT +4813+01620 Europe/Vienna +AU -3133+15905 Australia/Lord_Howe Lord Howe Island +AU -5430+15857 Antarctica/Macquarie Macquarie Island +AU -4253+14719 Australia/Hobart Tasmania - most locations +AU -3956+14352 Australia/Currie Tasmania - King Island +AU -3749+14458 Australia/Melbourne Victoria +AU -3352+15113 Australia/Sydney New South Wales - most locations +AU -3157+14127 Australia/Broken_Hill New South Wales - Yancowinna +AU -2728+15302 Australia/Brisbane Queensland - most locations +AU -2016+14900 Australia/Lindeman Queensland - Holiday Islands +AU -3455+13835 Australia/Adelaide South Australia +AU -1228+13050 Australia/Darwin Northern Territory +AU -3157+11551 Australia/Perth Western Australia - most locations +AU -3143+12852 Australia/Eucla Western Australia - Eucla area +AW +1230-06958 America/Aruba +AX +6006+01957 
Europe/Mariehamn +AZ +4023+04951 Asia/Baku +BA +4352+01825 Europe/Sarajevo +BB +1306-05937 America/Barbados +BD +2343+09025 Asia/Dhaka +BE +5050+00420 Europe/Brussels +BF +1222-00131 Africa/Ouagadougou +BG +4241+02319 Europe/Sofia +BH +2623+05035 Asia/Bahrain +BI -0323+02922 Africa/Bujumbura +BJ +0629+00237 Africa/Porto-Novo +BL +1753-06251 America/St_Barthelemy +BM +3217-06446 Atlantic/Bermuda +BN +0456+11455 Asia/Brunei +BO -1630-06809 America/La_Paz +BQ +120903-0681636 America/Kralendijk +BR -0351-03225 America/Noronha Atlantic islands +BR -0127-04829 America/Belem Amapa, E Para +BR -0343-03830 America/Fortaleza NE Brazil (MA, PI, CE, RN, PB) +BR -0803-03454 America/Recife Pernambuco +BR -0712-04812 America/Araguaina Tocantins +BR -0940-03543 America/Maceio Alagoas, Sergipe +BR -1259-03831 America/Bahia Bahia +BR -2332-04637 America/Sao_Paulo S & SE Brazil (GO, DF, MG, ES, RJ, SP, PR, SC, RS) +BR -2027-05437 America/Campo_Grande Mato Grosso do Sul +BR -1535-05605 America/Cuiaba Mato Grosso +BR -0226-05452 America/Santarem W Para +BR -0846-06354 America/Porto_Velho Rondonia +BR +0249-06040 America/Boa_Vista Roraima +BR -0308-06001 America/Manaus E Amazonas +BR -0640-06952 America/Eirunepe W Amazonas +BR -0958-06748 America/Rio_Branco Acre +BS +2505-07721 America/Nassau +BT +2728+08939 Asia/Thimphu +BW -2439+02555 Africa/Gaborone +BY +5354+02734 Europe/Minsk +BZ +1730-08812 America/Belize +CA +4734-05243 America/St_Johns Newfoundland Time, including SE Labrador +CA +4439-06336 America/Halifax Atlantic Time - Nova Scotia (most places), PEI +CA +4612-05957 America/Glace_Bay Atlantic Time - Nova Scotia - places that did not observe DST 1966-1971 +CA +4606-06447 America/Moncton Atlantic Time - New Brunswick +CA +5320-06025 America/Goose_Bay Atlantic Time - Labrador - most locations +CA +5125-05707 America/Blanc-Sablon Atlantic Standard Time - Quebec - Lower North Shore +CA +4339-07923 America/Toronto Eastern Time - Ontario & Quebec - most locations +CA +4901-08816 America/Nipigon Eastern Time - Ontario & Quebec - places that did not observe DST 1967-1973 +CA +4823-08915 America/Thunder_Bay Eastern Time - Thunder Bay, Ontario +CA +6344-06828 America/Iqaluit Eastern Time - east Nunavut - most locations +CA +6608-06544 America/Pangnirtung Eastern Time - Pangnirtung, Nunavut +CA +744144-0944945 America/Resolute Central Time - Resolute, Nunavut +CA +484531-0913718 America/Atikokan Eastern Standard Time - Atikokan, Ontario and Southampton I, Nunavut +CA +624900-0920459 America/Rankin_Inlet Central Time - central Nunavut +CA +4953-09709 America/Winnipeg Central Time - Manitoba & west Ontario +CA +4843-09434 America/Rainy_River Central Time - Rainy River & Fort Frances, Ontario +CA +5024-10439 America/Regina Central Standard Time - Saskatchewan - most locations +CA +5017-10750 America/Swift_Current Central Standard Time - Saskatchewan - midwest +CA +5333-11328 America/Edmonton Mountain Time - Alberta, east British Columbia & west Saskatchewan +CA +690650-1050310 America/Cambridge_Bay Mountain Time - west Nunavut +CA +6227-11421 America/Yellowknife Mountain Time - central Northwest Territories +CA +682059-1334300 America/Inuvik Mountain Time - west Northwest Territories +CA +4906-11631 America/Creston Mountain Standard Time - Creston, British Columbia +CA +5946-12014 America/Dawson_Creek Mountain Standard Time - Dawson Creek & Fort Saint John, British Columbia +CA +4916-12307 America/Vancouver Pacific Time - west British Columbia +CA +6043-13503 America/Whitehorse Pacific Time - south Yukon +CA 
+6404-13925 America/Dawson Pacific Time - north Yukon +CC -1210+09655 Indian/Cocos +CD -0418+01518 Africa/Kinshasa west Dem. Rep. of Congo +CD -1140+02728 Africa/Lubumbashi east Dem. Rep. of Congo +CF +0422+01835 Africa/Bangui +CG -0416+01517 Africa/Brazzaville +CH +4723+00832 Europe/Zurich +CI +0519-00402 Africa/Abidjan +CK -2114-15946 Pacific/Rarotonga +CL -3327-07040 America/Santiago most locations +CL -2709-10926 Pacific/Easter Easter Island +CM +0403+00942 Africa/Douala +CN +3114+12128 Asia/Shanghai Beijing Time +CN +4348+08735 Asia/Urumqi Xinjiang Time +CO +0436-07405 America/Bogota +CR +0956-08405 America/Costa_Rica +CU +2308-08222 America/Havana +CV +1455-02331 Atlantic/Cape_Verde +CW +1211-06900 America/Curacao +CX -1025+10543 Indian/Christmas +CY +3510+03322 Asia/Nicosia +CZ +5005+01426 Europe/Prague +DE +5230+01322 Europe/Berlin most locations +DE +4742+00841 Europe/Busingen Busingen +DJ +1136+04309 Africa/Djibouti +DK +5540+01235 Europe/Copenhagen +DM +1518-06124 America/Dominica +DO +1828-06954 America/Santo_Domingo +DZ +3647+00303 Africa/Algiers +EC -0210-07950 America/Guayaquil mainland +EC -0054-08936 Pacific/Galapagos Galapagos Islands +EE +5925+02445 Europe/Tallinn +EG +3003+03115 Africa/Cairo +EH +2709-01312 Africa/El_Aaiun +ER +1520+03853 Africa/Asmara +ES +4024-00341 Europe/Madrid mainland +ES +3553-00519 Africa/Ceuta Ceuta & Melilla +ES +2806-01524 Atlantic/Canary Canary Islands +ET +0902+03842 Africa/Addis_Ababa +FI +6010+02458 Europe/Helsinki +FJ -1808+17825 Pacific/Fiji +FK -5142-05751 Atlantic/Stanley +FM +0725+15147 Pacific/Chuuk Chuuk (Truk) and Yap +FM +0658+15813 Pacific/Pohnpei Pohnpei (Ponape) +FM +0519+16259 Pacific/Kosrae Kosrae +FO +6201-00646 Atlantic/Faroe +FR +4852+00220 Europe/Paris +GA +0023+00927 Africa/Libreville +GB +513030-0000731 Europe/London +GD +1203-06145 America/Grenada +GE +4143+04449 Asia/Tbilisi +GF +0456-05220 America/Cayenne +GG +4927-00232 Europe/Guernsey +GH +0533-00013 Africa/Accra +GI +3608-00521 Europe/Gibraltar +GL +6411-05144 America/Godthab most locations +GL +7646-01840 America/Danmarkshavn east coast, north of Scoresbysund +GL +7029-02158 America/Scoresbysund Scoresbysund / Ittoqqortoormiit +GL +7634-06847 America/Thule Thule / Pituffik +GM +1328-01639 Africa/Banjul +GN +0931-01343 Africa/Conakry +GP +1614-06132 America/Guadeloupe +GQ +0345+00847 Africa/Malabo +GR +3758+02343 Europe/Athens +GS -5416-03632 Atlantic/South_Georgia +GT +1438-09031 America/Guatemala +GU +1328+14445 Pacific/Guam +GW +1151-01535 Africa/Bissau +GY +0648-05810 America/Guyana +HK +2217+11409 Asia/Hong_Kong +HN +1406-08713 America/Tegucigalpa +HR +4548+01558 Europe/Zagreb +HT +1832-07220 America/Port-au-Prince +HU +4730+01905 Europe/Budapest +ID -0610+10648 Asia/Jakarta Java & Sumatra +ID -0002+10920 Asia/Pontianak west & central Borneo +ID -0507+11924 Asia/Makassar east & south Borneo, Sulawesi (Celebes), Bali, Nusa Tengarra, west Timor +ID -0232+14042 Asia/Jayapura west New Guinea (Irian Jaya) & Malukus (Moluccas) +IE +5320-00615 Europe/Dublin +IL +314650+0351326 Asia/Jerusalem +IM +5409-00428 Europe/Isle_of_Man +IN +2232+08822 Asia/Kolkata +IO -0720+07225 Indian/Chagos +IQ +3321+04425 Asia/Baghdad +IR +3540+05126 Asia/Tehran +IS +6409-02151 Atlantic/Reykjavik +IT +4154+01229 Europe/Rome +JE +4912-00207 Europe/Jersey +JM +175805-0764736 America/Jamaica +JO +3157+03556 Asia/Amman +JP +353916+1394441 Asia/Tokyo +KE -0117+03649 Africa/Nairobi +KG +4254+07436 Asia/Bishkek +KH +1133+10455 Asia/Phnom_Penh +KI +0125+17300 Pacific/Tarawa Gilbert Islands +KI 
-0308-17105 Pacific/Enderbury Phoenix Islands +KI +0152-15720 Pacific/Kiritimati Line Islands +KM -1141+04316 Indian/Comoro +KN +1718-06243 America/St_Kitts +KP +3901+12545 Asia/Pyongyang +KR +3733+12658 Asia/Seoul +KW +2920+04759 Asia/Kuwait +KY +1918-08123 America/Cayman +KZ +4315+07657 Asia/Almaty most locations +KZ +4448+06528 Asia/Qyzylorda Qyzylorda (Kyzylorda, Kzyl-Orda) +KZ +5017+05710 Asia/Aqtobe Aqtobe (Aktobe) +KZ +4431+05016 Asia/Aqtau Atyrau (Atirau, Gur'yev), Mangghystau (Mankistau) +KZ +5113+05121 Asia/Oral West Kazakhstan +LA +1758+10236 Asia/Vientiane +LB +3353+03530 Asia/Beirut +LC +1401-06100 America/St_Lucia +LI +4709+00931 Europe/Vaduz +LK +0656+07951 Asia/Colombo +LR +0618-01047 Africa/Monrovia +LS -2928+02730 Africa/Maseru +LT +5441+02519 Europe/Vilnius +LU +4936+00609 Europe/Luxembourg +LV +5657+02406 Europe/Riga +LY +3254+01311 Africa/Tripoli +MA +3339-00735 Africa/Casablanca +MC +4342+00723 Europe/Monaco +MD +4700+02850 Europe/Chisinau +ME +4226+01916 Europe/Podgorica +MF +1804-06305 America/Marigot +MG -1855+04731 Indian/Antananarivo +MH +0709+17112 Pacific/Majuro most locations +MH +0905+16720 Pacific/Kwajalein Kwajalein +MK +4159+02126 Europe/Skopje +ML +1239-00800 Africa/Bamako +MM +1647+09610 Asia/Rangoon +MN +4755+10653 Asia/Ulaanbaatar most locations +MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan +MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar +MO +2214+11335 Asia/Macau +MP +1512+14545 Pacific/Saipan +MQ +1436-06105 America/Martinique +MR +1806-01557 Africa/Nouakchott +MS +1643-06213 America/Montserrat +MT +3554+01431 Europe/Malta +MU -2010+05730 Indian/Mauritius +MV +0410+07330 Indian/Maldives +MW -1547+03500 Africa/Blantyre +MX +1924-09909 America/Mexico_City Central Time - most locations +MX +2105-08646 America/Cancun Central Time - Quintana Roo +MX +2058-08937 America/Merida Central Time - Campeche, Yucatan +MX +2540-10019 America/Monterrey Mexican Central Time - Coahuila, Durango, Nuevo Leon, Tamaulipas away from US border +MX +2550-09730 America/Matamoros US Central Time - Coahuila, Durango, Nuevo Leon, Tamaulipas near US border +MX +2313-10625 America/Mazatlan Mountain Time - S Baja, Nayarit, Sinaloa +MX +2838-10605 America/Chihuahua Mexican Mountain Time - Chihuahua away from US border +MX +2934-10425 America/Ojinaga US Mountain Time - Chihuahua near US border +MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora +MX +3232-11701 America/Tijuana US Pacific Time - Baja California near US border +MX +3018-11452 America/Santa_Isabel Mexican Pacific Time - Baja California away from US border +MX +2048-10515 America/Bahia_Banderas Mexican Central Time - Bahia de Banderas +MY +0310+10142 Asia/Kuala_Lumpur peninsular Malaysia +MY +0133+11020 Asia/Kuching Sabah & Sarawak +MZ -2558+03235 Africa/Maputo +NA -2234+01706 Africa/Windhoek +NC -2216+16627 Pacific/Noumea +NE +1331+00207 Africa/Niamey +NF -2903+16758 Pacific/Norfolk +NG +0627+00324 Africa/Lagos +NI +1209-08617 America/Managua +NL +5222+00454 Europe/Amsterdam +NO +5955+01045 Europe/Oslo +NP +2743+08519 Asia/Kathmandu +NR -0031+16655 Pacific/Nauru +NU -1901-16955 Pacific/Niue +NZ -3652+17446 Pacific/Auckland most locations +NZ -4357-17633 Pacific/Chatham Chatham Islands +OM +2336+05835 Asia/Muscat +PA +0858-07932 America/Panama +PE -1203-07703 America/Lima +PF -1732-14934 Pacific/Tahiti Society Islands +PF -0900-13930 Pacific/Marquesas Marquesas Islands +PF -2308-13457 Pacific/Gambier Gambier Islands +PG -0930+14710 Pacific/Port_Moresby most locations +PG 
-0613+15534 Pacific/Bougainville Bougainville +PH +1435+12100 Asia/Manila +PK +2452+06703 Asia/Karachi +PL +5215+02100 Europe/Warsaw +PM +4703-05620 America/Miquelon +PN -2504-13005 Pacific/Pitcairn +PR +182806-0660622 America/Puerto_Rico +PS +3130+03428 Asia/Gaza Gaza Strip +PS +313200+0350542 Asia/Hebron West Bank +PT +3843-00908 Europe/Lisbon mainland +PT +3238-01654 Atlantic/Madeira Madeira Islands +PT +3744-02540 Atlantic/Azores Azores +PW +0720+13429 Pacific/Palau +PY -2516-05740 America/Asuncion +QA +2517+05132 Asia/Qatar +RE -2052+05528 Indian/Reunion +RO +4426+02606 Europe/Bucharest +RS +4450+02030 Europe/Belgrade +RU +5443+02030 Europe/Kaliningrad Moscow-01 - Kaliningrad +RU +554521+0373704 Europe/Moscow Moscow+00 - west Russia +RU +4457+03406 Europe/Simferopol Moscow+00 - Crimea +RU +4844+04425 Europe/Volgograd Moscow+00 - Caspian Sea +RU +5312+05009 Europe/Samara Moscow+00 (Moscow+01 after 2014-10-26) - Samara, Udmurtia +RU +5651+06036 Asia/Yekaterinburg Moscow+02 - Urals +RU +5500+07324 Asia/Omsk Moscow+03 - west Siberia +RU +5502+08255 Asia/Novosibirsk Moscow+03 - Novosibirsk +RU +5345+08707 Asia/Novokuznetsk Moscow+03 (Moscow+04 after 2014-10-26) - Kemerovo +RU +5601+09250 Asia/Krasnoyarsk Moscow+04 - Yenisei River +RU +5216+10420 Asia/Irkutsk Moscow+05 - Lake Baikal +RU +5203+11328 Asia/Chita Moscow+06 (Moscow+05 after 2014-10-26) - Zabaykalsky +RU +6200+12940 Asia/Yakutsk Moscow+06 - Lena River +RU +623923+1353314 Asia/Khandyga Moscow+06 - Tomponsky, Ust-Maysky +RU +4310+13156 Asia/Vladivostok Moscow+07 - Amur River +RU +4658+14242 Asia/Sakhalin Moscow+07 - Sakhalin Island +RU +643337+1431336 Asia/Ust-Nera Moscow+07 - Oymyakonsky +RU +5934+15048 Asia/Magadan Moscow+08 (Moscow+07 after 2014-10-26) - Magadan +RU +6728+15343 Asia/Srednekolymsk Moscow+08 - E Sakha, N Kuril Is +RU +5301+15839 Asia/Kamchatka Moscow+08 (Moscow+09 after 2014-10-26) - Kamchatka +RU +6445+17729 Asia/Anadyr Moscow+08 (Moscow+09 after 2014-10-26) - Bering Sea +RW -0157+03004 Africa/Kigali +SA +2438+04643 Asia/Riyadh +SB -0932+16012 Pacific/Guadalcanal +SC -0440+05528 Indian/Mahe +SD +1536+03232 Africa/Khartoum +SE +5920+01803 Europe/Stockholm +SG +0117+10351 Asia/Singapore +SH -1555-00542 Atlantic/St_Helena +SI +4603+01431 Europe/Ljubljana +SJ +7800+01600 Arctic/Longyearbyen +SK +4809+01707 Europe/Bratislava +SL +0830-01315 Africa/Freetown +SM +4355+01228 Europe/San_Marino +SN +1440-01726 Africa/Dakar +SO +0204+04522 Africa/Mogadishu +SR +0550-05510 America/Paramaribo +SS +0451+03136 Africa/Juba +ST +0020+00644 Africa/Sao_Tome +SV +1342-08912 America/El_Salvador +SX +180305-0630250 America/Lower_Princes +SY +3330+03618 Asia/Damascus +SZ -2618+03106 Africa/Mbabane +TC +2128-07108 America/Grand_Turk +TD +1207+01503 Africa/Ndjamena +TF -492110+0701303 Indian/Kerguelen +TG +0608+00113 Africa/Lome +TH +1345+10031 Asia/Bangkok +TJ +3835+06848 Asia/Dushanbe +TK -0922-17114 Pacific/Fakaofo +TL -0833+12535 Asia/Dili +TM +3757+05823 Asia/Ashgabat +TN +3648+01011 Africa/Tunis +TO -2110-17510 Pacific/Tongatapu +TR +4101+02858 Europe/Istanbul +TT +1039-06131 America/Port_of_Spain +TV -0831+17913 Pacific/Funafuti +TW +2503+12130 Asia/Taipei +TZ -0648+03917 Africa/Dar_es_Salaam +UA +5026+03031 Europe/Kiev most locations +UA +4837+02218 Europe/Uzhgorod Ruthenia +UA +4750+03510 Europe/Zaporozhye Zaporozh'ye, E Lugansk / Zaporizhia, E Luhansk +UG +0019+03225 Africa/Kampala +UM +1645-16931 Pacific/Johnston Johnston Atoll +UM +2813-17722 Pacific/Midway Midway Islands +UM +1917+16637 Pacific/Wake Wake Island +US 
+404251-0740023 America/New_York Eastern Time +US +421953-0830245 America/Detroit Eastern Time - Michigan - most locations +US +381515-0854534 America/Kentucky/Louisville Eastern Time - Kentucky - Louisville area +US +364947-0845057 America/Kentucky/Monticello Eastern Time - Kentucky - Wayne County +US +394606-0860929 America/Indiana/Indianapolis Eastern Time - Indiana - most locations +US +384038-0873143 America/Indiana/Vincennes Eastern Time - Indiana - Daviess, Dubois, Knox & Martin Counties +US +410305-0863611 America/Indiana/Winamac Eastern Time - Indiana - Pulaski County +US +382232-0862041 America/Indiana/Marengo Eastern Time - Indiana - Crawford County +US +382931-0871643 America/Indiana/Petersburg Eastern Time - Indiana - Pike County +US +384452-0850402 America/Indiana/Vevay Eastern Time - Indiana - Switzerland County +US +415100-0873900 America/Chicago Central Time +US +375711-0864541 America/Indiana/Tell_City Central Time - Indiana - Perry County +US +411745-0863730 America/Indiana/Knox Central Time - Indiana - Starke County +US +450628-0873651 America/Menominee Central Time - Michigan - Dickinson, Gogebic, Iron & Menominee Counties +US +470659-1011757 America/North_Dakota/Center Central Time - North Dakota - Oliver County +US +465042-1012439 America/North_Dakota/New_Salem Central Time - North Dakota - Morton County (except Mandan area) +US +471551-1014640 America/North_Dakota/Beulah Central Time - North Dakota - Mercer County +US +394421-1045903 America/Denver Mountain Time +US +433649-1161209 America/Boise Mountain Time - south Idaho & east Oregon +US +332654-1120424 America/Phoenix Mountain Standard Time - Arizona (except Navajo) +US +340308-1181434 America/Los_Angeles Pacific Time +US +550737-1313435 America/Metlakatla Pacific Standard Time - Annette Island, Alaska +US +611305-1495401 America/Anchorage Alaska Time +US +581807-1342511 America/Juneau Alaska Time - Alaska panhandle +US +571035-1351807 America/Sitka Alaska Time - southeast Alaska panhandle +US +593249-1394338 America/Yakutat Alaska Time - Alaska panhandle neck +US +643004-1652423 America/Nome Alaska Time - west Alaska +US +515248-1763929 America/Adak Aleutian Islands +US +211825-1575130 Pacific/Honolulu Hawaii +UY -3453-05611 America/Montevideo +UZ +3940+06648 Asia/Samarkand west Uzbekistan +UZ +4120+06918 Asia/Tashkent east Uzbekistan +VA +415408+0122711 Europe/Vatican +VC +1309-06114 America/St_Vincent +VE +1030-06656 America/Caracas +VG +1827-06437 America/Tortola +VI +1821-06456 America/St_Thomas +VN +1045+10640 Asia/Ho_Chi_Minh +VU -1740+16825 Pacific/Efate +WF -1318-17610 Pacific/Wallis +WS -1350-17144 Pacific/Apia +YE +1245+04512 Asia/Aden +YT -1247+04514 Indian/Mayotte +ZA -2615+02800 Africa/Johannesburg +ZM -1525+02817 Africa/Lusaka +ZW -1750+03103 Africa/Harare diff --git a/lib/pytz/zoneinfo/zone1970.tab b/lib/pytz/zoneinfo/zone1970.tab new file mode 100644 index 00000000..e971bc7f --- /dev/null +++ b/lib/pytz/zoneinfo/zone1970.tab @@ -0,0 +1,371 @@ +# tz zone descriptions +# +# This file is in the public domain. +# +# From Paul Eggert (2014-07-31): +# This file contains a table where each row stands for a zone where +# civil time stamps have agreed since 1970. Columns are separated by +# a single tab. Lines beginning with '#' are comments. All text uses +# UTF-8 encoding. The columns of the table are as follows: +# +# 1. The countries that overlap the zone, as a comma-separated list +# of ISO 3166 2-character country codes. See the file 'iso3166.tab'. +# 2. 
Latitude and longitude of the zone's principal location +# in ISO 6709 sign-degrees-minutes-seconds format, +# either +-DDMM+-DDDMM or +-DDMMSS+-DDDMMSS, +# first latitude (+ is north), then longitude (+ is east). +# 3. Zone name used in value of TZ environment variable. +# Please see the 'Theory' file for how zone names are chosen. +# If multiple zones overlap a country, each has a row in the +# table, with each column 1 containing the country code. +# 4. Comments; present if and only if a country has multiple zones. +# +# If a zone covers multiple countries, the most-populous city is used, +# and that country is listed first in column 1; any other countries +# are listed alphabetically by country code. The table is sorted +# first by country code, then (if possible) by an order within the +# country that (1) makes some geographical sense, and (2) puts the +# most populous zones first, where that does not contradict (1). +# +# This table is intended as an aid for users, to help them select time +# zone data entries appropriate for their practical needs. It is not +# intended to take or endorse any position on legal or territorial claims. +# +#country- +#codes coordinates TZ comments +AD +4230+00131 Europe/Andorra +AE,OM +2518+05518 Asia/Dubai +AF +3431+06912 Asia/Kabul +AL +4120+01950 Europe/Tirane +AM +4011+04430 Asia/Yerevan +AQ -6734-06808 Antarctica/Rothera Rothera Station, Adelaide Island +AQ -6448-06406 Antarctica/Palmer Palmer Station, Anvers Island +AQ -6736+06253 Antarctica/Mawson Mawson Station, Holme Bay +AQ -6835+07758 Antarctica/Davis Davis Station, Vestfold Hills +AQ -6617+11031 Antarctica/Casey Casey Station, Bailey Peninsula +AQ -7824+10654 Antarctica/Vostok Vostok Station, Lake Vostok +AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville Station, Adélie Land +AQ -690022+0393524 Antarctica/Syowa Syowa Station, E Ongul I +AQ -720041+0023206 Antarctica/Troll Troll Station, Queen Maud Land +AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) +AR -3124-06411 America/Argentina/Cordoba most locations (CB, CC, CN, ER, FM, MN, SE, SF) +AR -2447-06525 America/Argentina/Salta (SA, LP, NQ, RN) +AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) +AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) +AR -2828-06547 America/Argentina/Catamarca Catamarca (CT), Chubut (CH) +AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) +AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) +AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) +AR -3319-06621 America/Argentina/San_Luis San Luis (SL) +AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) +AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) +AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway +AT +4813+01620 Europe/Vienna +AU -3133+15905 Australia/Lord_Howe Lord Howe Island +AU -5430+15857 Antarctica/Macquarie Macquarie Island +AU -4253+14719 Australia/Hobart Tasmania - most locations +AU -3956+14352 Australia/Currie Tasmania - King Island +AU -3749+14458 Australia/Melbourne Victoria +AU -3352+15113 Australia/Sydney New South Wales - most locations +AU -3157+14127 Australia/Broken_Hill New South Wales - Yancowinna +AU -2728+15302 Australia/Brisbane Queensland - most locations +AU -2016+14900 Australia/Lindeman Queensland - Holiday Islands +AU -3455+13835 Australia/Adelaide South Australia +AU -1228+13050 Australia/Darwin Northern Territory +AU -3157+11551 Australia/Perth Western Australia - most locations +AU -3143+12852 Australia/Eucla Western Australia - Eucla area +AZ +4023+04951 
Asia/Baku +BB +1306-05937 America/Barbados +BD +2343+09025 Asia/Dhaka +BE +5050+00420 Europe/Brussels +BG +4241+02319 Europe/Sofia +BM +3217-06446 Atlantic/Bermuda +BN +0456+11455 Asia/Brunei +BO -1630-06809 America/La_Paz +BR -0351-03225 America/Noronha Atlantic islands +BR -0127-04829 America/Belem Amapá, E Pará +BR -0343-03830 America/Fortaleza NE Brazil (MA, PI, CE, RN, PB) +BR -0803-03454 America/Recife Pernambuco +BR -0712-04812 America/Araguaina Tocantins +BR -0940-03543 America/Maceio Alagoas, Sergipe +BR -1259-03831 America/Bahia Bahia +BR -2332-04637 America/Sao_Paulo S & SE Brazil (GO, DF, MG, ES, RJ, SP, PR, SC, RS) +BR -2027-05437 America/Campo_Grande Mato Grosso do Sul +BR -1535-05605 America/Cuiaba Mato Grosso +BR -0226-05452 America/Santarem W Pará +BR -0846-06354 America/Porto_Velho Rondônia +BR +0249-06040 America/Boa_Vista Roraima +BR -0308-06001 America/Manaus E Amazonas +BR -0640-06952 America/Eirunepe W Amazonas +BR -0958-06748 America/Rio_Branco Acre +BS +2505-07721 America/Nassau +BT +2728+08939 Asia/Thimphu +BY +5354+02734 Europe/Minsk +BZ +1730-08812 America/Belize +CA +4734-05243 America/St_Johns Newfoundland Time, including SE Labrador +CA +4439-06336 America/Halifax Atlantic Time - Nova Scotia (most places), PEI +CA +4612-05957 America/Glace_Bay Atlantic Time - Nova Scotia - places that did not observe DST 1966-1971 +CA +4606-06447 America/Moncton Atlantic Time - New Brunswick +CA +5320-06025 America/Goose_Bay Atlantic Time - Labrador - most locations +CA +5125-05707 America/Blanc-Sablon Atlantic Standard Time - Quebec - Lower North Shore +CA +4339-07923 America/Toronto Eastern Time - Ontario & Quebec - most locations +CA +4901-08816 America/Nipigon Eastern Time - Ontario & Quebec - places that did not observe DST 1967-1973 +CA +4823-08915 America/Thunder_Bay Eastern Time - Thunder Bay, Ontario +CA +6344-06828 America/Iqaluit Eastern Time - east Nunavut - most locations +CA +6608-06544 America/Pangnirtung Eastern Time - Pangnirtung, Nunavut +CA +744144-0944945 America/Resolute Central Time - Resolute, Nunavut +CA +484531-0913718 America/Atikokan Eastern Standard Time - Atikokan, Ontario and Southampton I, Nunavut +CA +624900-0920459 America/Rankin_Inlet Central Time - central Nunavut +CA +4953-09709 America/Winnipeg Central Time - Manitoba & west Ontario +CA +4843-09434 America/Rainy_River Central Time - Rainy River & Fort Frances, Ontario +CA +5024-10439 America/Regina Central Standard Time - Saskatchewan - most locations +CA +5017-10750 America/Swift_Current Central Standard Time - Saskatchewan - midwest +CA +5333-11328 America/Edmonton Mountain Time - Alberta, east British Columbia & west Saskatchewan +CA +690650-1050310 America/Cambridge_Bay Mountain Time - west Nunavut +CA +6227-11421 America/Yellowknife Mountain Time - central Northwest Territories +CA +682059-1334300 America/Inuvik Mountain Time - west Northwest Territories +CA +4906-11631 America/Creston Mountain Standard Time - Creston, British Columbia +CA +5946-12014 America/Dawson_Creek Mountain Standard Time - Dawson Creek & Fort Saint John, British Columbia +CA +4916-12307 America/Vancouver Pacific Time - west British Columbia +CA +6043-13503 America/Whitehorse Pacific Time - south Yukon +CA +6404-13925 America/Dawson Pacific Time - north Yukon +CC -1210+09655 Indian/Cocos +CH,DE,LI +4723+00832 Europe/Zurich Swiss time +CI,BF,GM,GN,ML,MR,SH,SL,SN,ST,TG +0519-00402 Africa/Abidjan +CK -2114-15946 Pacific/Rarotonga +CL -3327-07040 America/Santiago most locations +CL -2709-10926 Pacific/Easter Easter 
Island +CN +3114+12128 Asia/Shanghai Beijing Time +CN +4348+08735 Asia/Urumqi Xinjiang Time +CO +0436-07405 America/Bogota +CR +0956-08405 America/Costa_Rica +CU +2308-08222 America/Havana +CV +1455-02331 Atlantic/Cape_Verde +CW,AW,BQ,SX +1211-06900 America/Curacao +CX -1025+10543 Indian/Christmas +CY +3510+03322 Asia/Nicosia +CZ,SK +5005+01426 Europe/Prague +DE +5230+01322 Europe/Berlin Berlin time +DK +5540+01235 Europe/Copenhagen +DO +1828-06954 America/Santo_Domingo +DZ +3647+00303 Africa/Algiers +EC -0210-07950 America/Guayaquil mainland +EC -0054-08936 Pacific/Galapagos Galápagos Islands +EE +5925+02445 Europe/Tallinn +EG +3003+03115 Africa/Cairo +EH +2709-01312 Africa/El_Aaiun +ES +4024-00341 Europe/Madrid mainland +ES +3553-00519 Africa/Ceuta Ceuta & Melilla +ES +2806-01524 Atlantic/Canary Canary Islands +FI,AX +6010+02458 Europe/Helsinki +FJ -1808+17825 Pacific/Fiji +FK -5142-05751 Atlantic/Stanley +FM +0725+15147 Pacific/Chuuk Chuuk (Truk) and Yap +FM +0658+15813 Pacific/Pohnpei Pohnpei (Ponape) +FM +0519+16259 Pacific/Kosrae Kosrae +FO +6201-00646 Atlantic/Faroe +FR +4852+00220 Europe/Paris +GB,GG,IM,JE +513030-0000731 Europe/London +GE +4143+04449 Asia/Tbilisi +GF +0456-05220 America/Cayenne +GH +0533-00013 Africa/Accra +GI +3608-00521 Europe/Gibraltar +GL +6411-05144 America/Godthab most locations +GL +7646-01840 America/Danmarkshavn east coast, north of Scoresbysund +GL +7029-02158 America/Scoresbysund Scoresbysund / Ittoqqortoormiit +GL +7634-06847 America/Thule Thule / Pituffik +GR +3758+02343 Europe/Athens +GS -5416-03632 Atlantic/South_Georgia +GT +1438-09031 America/Guatemala +GU,MP +1328+14445 Pacific/Guam +GW +1151-01535 Africa/Bissau +GY +0648-05810 America/Guyana +HK +2217+11409 Asia/Hong_Kong +HN +1406-08713 America/Tegucigalpa +HT +1832-07220 America/Port-au-Prince +HU +4730+01905 Europe/Budapest +ID -0610+10648 Asia/Jakarta Java & Sumatra +ID -0002+10920 Asia/Pontianak west & central Borneo +ID -0507+11924 Asia/Makassar east & south Borneo, Sulawesi (Celebes), Bali, Nusa Tengarra, west Timor +ID -0232+14042 Asia/Jayapura west New Guinea (Irian Jaya) & Malukus (Moluccas) +IE +5320-00615 Europe/Dublin +IL +314650+0351326 Asia/Jerusalem +IN +2232+08822 Asia/Kolkata +IO -0720+07225 Indian/Chagos +IQ +3321+04425 Asia/Baghdad +IR +3540+05126 Asia/Tehran +IS +6409-02151 Atlantic/Reykjavik +IT,SM,VA +4154+01229 Europe/Rome +JM +175805-0764736 America/Jamaica +JO +3157+03556 Asia/Amman +JP +353916+1394441 Asia/Tokyo +KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi +KG +4254+07436 Asia/Bishkek +KI +0125+17300 Pacific/Tarawa Gilbert Islands +KI -0308-17105 Pacific/Enderbury Phoenix Islands +KI +0152-15720 Pacific/Kiritimati Line Islands +KP +3901+12545 Asia/Pyongyang +KR +3733+12658 Asia/Seoul +KZ +4315+07657 Asia/Almaty most locations +KZ +4448+06528 Asia/Qyzylorda Qyzylorda (Kyzylorda, Kzyl-Orda) +KZ +5017+05710 Asia/Aqtobe Aqtobe (Aktobe) +KZ +4431+05016 Asia/Aqtau Atyrau (Atirau, Gur'yev), Mangghystau (Mankistau) +KZ +5113+05121 Asia/Oral West Kazakhstan +LB +3353+03530 Asia/Beirut +LK +0656+07951 Asia/Colombo +LR +0618-01047 Africa/Monrovia +LT +5441+02519 Europe/Vilnius +LU +4936+00609 Europe/Luxembourg +LV +5657+02406 Europe/Riga +LY +3254+01311 Africa/Tripoli +MA +3339-00735 Africa/Casablanca +MC +4342+00723 Europe/Monaco +MD +4700+02850 Europe/Chisinau +MH +0709+17112 Pacific/Majuro most locations +MH +0905+16720 Pacific/Kwajalein Kwajalein +MM +1647+09610 Asia/Rangoon +MN +4755+10653 Asia/Ulaanbaatar most locations +MN +4801+09139 Asia/Hovd Bayan-Ölgii, 
Govi-Altai, Hovd, Uvs, Zavkhan +MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar +MO +2214+11335 Asia/Macau +MQ +1436-06105 America/Martinique +MT +3554+01431 Europe/Malta +MU -2010+05730 Indian/Mauritius +MV +0410+07330 Indian/Maldives +MX +1924-09909 America/Mexico_City Central Time - most locations +MX +2105-08646 America/Cancun Central Time - Quintana Roo +MX +2058-08937 America/Merida Central Time - Campeche, Yucatán +MX +2540-10019 America/Monterrey Mexican Central Time - Coahuila, Durango, Nuevo León, Tamaulipas away from US border +MX +2550-09730 America/Matamoros US Central Time - Coahuila, Durango, Nuevo León, Tamaulipas near US border +MX +2313-10625 America/Mazatlan Mountain Time - S Baja, Nayarit, Sinaloa +MX +2838-10605 America/Chihuahua Mexican Mountain Time - Chihuahua away from US border +MX +2934-10425 America/Ojinaga US Mountain Time - Chihuahua near US border +MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora +MX +3232-11701 America/Tijuana US Pacific Time - Baja California near US border +MX +3018-11452 America/Santa_Isabel Mexican Pacific Time - Baja California away from US border +MX +2048-10515 America/Bahia_Banderas Mexican Central Time - Bahía de Banderas +MY +0310+10142 Asia/Kuala_Lumpur peninsular Malaysia +MY +0133+11020 Asia/Kuching Sabah & Sarawak +MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time (UTC+2) +NA -2234+01706 Africa/Windhoek +NC -2216+16627 Pacific/Noumea +NF -2903+16758 Pacific/Norfolk +NG,AO,BJ,CD,CF,CG,CM,GA,GQ,NE +0627+00324 Africa/Lagos West Africa Time (UTC+1) +NI +1209-08617 America/Managua +NL +5222+00454 Europe/Amsterdam +NO,SJ +5955+01045 Europe/Oslo +NP +2743+08519 Asia/Kathmandu +NR -0031+16655 Pacific/Nauru +NU -1901-16955 Pacific/Niue +NZ,AQ -3652+17446 Pacific/Auckland New Zealand time +NZ -4357-17633 Pacific/Chatham Chatham Islands +PA,KY +0858-07932 America/Panama +PE -1203-07703 America/Lima +PF -1732-14934 Pacific/Tahiti Society Islands +PF -0900-13930 Pacific/Marquesas Marquesas Islands +PF -2308-13457 Pacific/Gambier Gambier Islands +PG -0930+14710 Pacific/Port_Moresby most locations +PG -0613+15534 Pacific/Bougainville Bougainville +PH +1435+12100 Asia/Manila +PK +2452+06703 Asia/Karachi +PL +5215+02100 Europe/Warsaw +PM +4703-05620 America/Miquelon +PN -2504-13005 Pacific/Pitcairn +PR +182806-0660622 America/Puerto_Rico +PS +3130+03428 Asia/Gaza Gaza Strip +PS +313200+0350542 Asia/Hebron West Bank +PT +3843-00908 Europe/Lisbon mainland +PT +3238-01654 Atlantic/Madeira Madeira Islands +PT +3744-02540 Atlantic/Azores Azores +PW +0720+13429 Pacific/Palau +PY -2516-05740 America/Asuncion +QA,BH +2517+05132 Asia/Qatar +RE,TF -2052+05528 Indian/Reunion Réunion, Crozet Is, Scattered Is +RO +4426+02606 Europe/Bucharest +RS,BA,HR,ME,MK,SI +4450+02030 Europe/Belgrade +RU +5443+02030 Europe/Kaliningrad Moscow-01 - Kaliningrad +RU +554521+0373704 Europe/Moscow Moscow+00 - west Russia +RU +4457+03406 Europe/Simferopol Moscow+00 - Crimea +RU +4844+04425 Europe/Volgograd Moscow+00 - Caspian Sea +RU +5312+05009 Europe/Samara Moscow+00 (Moscow+01 after 2014-10-26) - Samara, Udmurtia +RU +5651+06036 Asia/Yekaterinburg Moscow+02 - Urals +RU +5500+07324 Asia/Omsk Moscow+03 - west Siberia +RU +5502+08255 Asia/Novosibirsk Moscow+03 - Novosibirsk +RU +5345+08707 Asia/Novokuznetsk Moscow+03 (Moscow+04 after 2014-10-26) - Kemerovo +RU +5601+09250 Asia/Krasnoyarsk Moscow+04 - Yenisei River +RU +5216+10420 Asia/Irkutsk Moscow+05 - Lake Baikal +RU +5203+11328 Asia/Chita Moscow+06 (Moscow+05 after 2014-10-26) - 
Zabaykalsky +RU +6200+12940 Asia/Yakutsk Moscow+06 - Lena River +RU +623923+1353314 Asia/Khandyga Moscow+06 - Tomponsky, Ust-Maysky +RU +4310+13156 Asia/Vladivostok Moscow+07 - Amur River +RU +4658+14242 Asia/Sakhalin Moscow+07 - Sakhalin Island +RU +643337+1431336 Asia/Ust-Nera Moscow+07 - Oymyakonsky +RU +5934+15048 Asia/Magadan Moscow+08 (Moscow+07 after 2014-10-26) - Magadan +RU +6728+15343 Asia/Srednekolymsk Moscow+08 - E Sakha, N Kuril Is +RU +5301+15839 Asia/Kamchatka Moscow+08 (Moscow+09 after 2014-10-26) - Kamchatka +RU +6445+17729 Asia/Anadyr Moscow+08 (Moscow+09 after 2014-10-26) - Bering Sea +SA,KW,YE +2438+04643 Asia/Riyadh +SB -0932+16012 Pacific/Guadalcanal +SC -0440+05528 Indian/Mahe +SD,SS +1536+03232 Africa/Khartoum +SE +5920+01803 Europe/Stockholm +SG +0117+10351 Asia/Singapore +SR +0550-05510 America/Paramaribo +SV +1342-08912 America/El_Salvador +SY +3330+03618 Asia/Damascus +TC +2128-07108 America/Grand_Turk +TD +1207+01503 Africa/Ndjamena +TF -492110+0701303 Indian/Kerguelen Kerguelen, St Paul I, Amsterdam I +TH,KH,LA,VN +1345+10031 Asia/Bangkok most of Indochina +TJ +3835+06848 Asia/Dushanbe +TK -0922-17114 Pacific/Fakaofo +TL -0833+12535 Asia/Dili +TM +3757+05823 Asia/Ashgabat +TN +3648+01011 Africa/Tunis +TO -2110-17510 Pacific/Tongatapu +TR +4101+02858 Europe/Istanbul +TT,AG,AI,BL,DM,GD,GP,KN,LC,MF,MS,VC,VG,VI +1039-06131 America/Port_of_Spain +TV -0831+17913 Pacific/Funafuti +TW +2503+12130 Asia/Taipei +UA +5026+03031 Europe/Kiev most locations +UA +4837+02218 Europe/Uzhgorod Ruthenia +UA +4750+03510 Europe/Zaporozhye Zaporozh'ye, E Lugansk / Zaporizhia, E Luhansk +UM +1917+16637 Pacific/Wake Wake Island +US +404251-0740023 America/New_York Eastern Time +US +421953-0830245 America/Detroit Eastern Time - Michigan - most locations +US +381515-0854534 America/Kentucky/Louisville Eastern Time - Kentucky - Louisville area +US +364947-0845057 America/Kentucky/Monticello Eastern Time - Kentucky - Wayne County +US +394606-0860929 America/Indiana/Indianapolis Eastern Time - Indiana - most locations +US +384038-0873143 America/Indiana/Vincennes Eastern Time - Indiana - Daviess, Dubois, Knox & Martin Counties +US +410305-0863611 America/Indiana/Winamac Eastern Time - Indiana - Pulaski County +US +382232-0862041 America/Indiana/Marengo Eastern Time - Indiana - Crawford County +US +382931-0871643 America/Indiana/Petersburg Eastern Time - Indiana - Pike County +US +384452-0850402 America/Indiana/Vevay Eastern Time - Indiana - Switzerland County +US +415100-0873900 America/Chicago Central Time +US +375711-0864541 America/Indiana/Tell_City Central Time - Indiana - Perry County +US +411745-0863730 America/Indiana/Knox Central Time - Indiana - Starke County +US +450628-0873651 America/Menominee Central Time - Michigan - Dickinson, Gogebic, Iron & Menominee Counties +US +470659-1011757 America/North_Dakota/Center Central Time - North Dakota - Oliver County +US +465042-1012439 America/North_Dakota/New_Salem Central Time - North Dakota - Morton County (except Mandan area) +US +471551-1014640 America/North_Dakota/Beulah Central Time - North Dakota - Mercer County +US +394421-1045903 America/Denver Mountain Time +US +433649-1161209 America/Boise Mountain Time - south Idaho & east Oregon +US +332654-1120424 America/Phoenix Mountain Standard Time - Arizona (except Navajo) +US +340308-1181434 America/Los_Angeles Pacific Time +US +550737-1313435 America/Metlakatla Pacific Standard Time - Annette Island, Alaska +US +611305-1495401 America/Anchorage Alaska Time +US +581807-1342511 
America/Juneau Alaska Time - Alaska panhandle +US +571035-1351807 America/Sitka Alaska Time - southeast Alaska panhandle +US +593249-1394338 America/Yakutat Alaska Time - Alaska panhandle neck +US +643004-1652423 America/Nome Alaska Time - west Alaska +US +515248-1763929 America/Adak Aleutian Islands +US,UM +211825-1575130 Pacific/Honolulu Hawaii time +UY -3453-05611 America/Montevideo +UZ +3940+06648 Asia/Samarkand west Uzbekistan +UZ +4120+06918 Asia/Tashkent east Uzbekistan +VE +1030-06656 America/Caracas +VN +1045+10640 Asia/Ho_Chi_Minh south Vietnam +VU -1740+16825 Pacific/Efate +WF -1318-17610 Pacific/Wallis +WS -1350-17144 Pacific/Apia +ZA,LS,SZ -2615+02800 Africa/Johannesburg diff --git a/lib/tzlocal/LICENSE.txt b/lib/tzlocal/LICENSE.txt new file mode 100644 index 00000000..0e259d42 --- /dev/null +++ b/lib/tzlocal/LICENSE.txt @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. 
rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. 
Affirmer offers the Work as-is and makes no representations or
+    warranties of any kind concerning the Work, express, implied,
+    statutory or otherwise, including without limitation warranties of
+    title, merchantability, fitness for a particular purpose, non
+    infringement, or the absence of latent or other defects, accuracy, or
+    the present or absence of errors, whether or not discoverable, all to
+    the greatest extent permissible under applicable law.
+ c. Affirmer disclaims responsibility for clearing rights of other persons
+    that may apply to the Work or any use thereof, including without
+    limitation any person's Copyright and Related Rights in the Work.
+    Further, Affirmer disclaims responsibility for obtaining any necessary
+    consents, permissions or other rights required for any use of the
+    Work.
+ d. Affirmer understands and acknowledges that Creative Commons is not a
+    party to this document and has no duty or obligation with respect to
+    this CC0 or use of the Work.
diff --git a/lib/tzlocal/README.rst b/lib/tzlocal/README.rst
new file mode 100644
index 00000000..09e9bfe8
--- /dev/null
+++ b/lib/tzlocal/README.rst
@@ -0,0 +1,80 @@
+tzlocal
+=======
+
+This Python module returns a `tzinfo` object with the local timezone information under Unix and Win-32.
+It requires `pytz`, and returns `pytz` `tzinfo` objects.
+
+This module attempts to fix a glaring hole in `pytz`, that there is no way to
+get the local timezone information, unless you know the zoneinfo name, and
+under several Linux distros that's hard or impossible to figure out.
+
+Also, with Windows' different timezone system, using pytz isn't of much use
+unless you separately configure the zoneinfo timezone name.
+
+With `tzlocal` you only need to call `get_localzone()` and you will get a
+`tzinfo` object with the local time zone info. On some Unices you will still
+not get to know what the timezone name is, but you don't need that when you
+have the tzinfo file. However, if the timezone name is readily available it
+will be used.
+
+
+Supported systems
+-----------------
+
+These are the systems that are in theory supported:
+
+ * Windows 2000 and later
+
+ * Any unix-like system with a /etc/localtime or /usr/local/etc/localtime
+
+If you have one of the above systems and it does not work, it's a bug.
+Please report it.
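+
+The zone lookup is cached after the first call, so repeated calls are cheap.
+If the system timezone changes while the process is running, the cached value
+can be refreshed with `reload_localzone()`. A short sketch (assuming only the
+`tzlocal` API shown in the sources below):
+
+    >>> from tzlocal import get_localzone, reload_localzone
+    >>> get_localzone() is get_localzone()   # the same cached object
+    True
+    >>> tz = reload_localzone()              # re-reads the configuration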
+
+
+Usage
+-----
+
+Load the local timezone:
+
+    >>> from tzlocal import get_localzone
+    >>> tz = get_localzone()
+    >>> tz
+    <DstTzInfo 'Europe/Warsaw' WMT+1:24:00 STD>
+
+Create a local datetime:
+
+    >>> from datetime import datetime
+    >>> dt = tz.localize(datetime.now())
+    >>> dt
+    datetime.datetime(2012, 9, 11, 14, 43, 42, 518871, tzinfo=<DstTzInfo 'Europe/Warsaw' CEST+2:00:00 DST>)
+
+Lookup another timezone with `pytz`:
+
+    >>> import pytz
+    >>> eastern = pytz.timezone('US/Eastern')
+
+Convert the datetime:
+
+    >>> dt.astimezone(eastern)
+    datetime.datetime(2012, 9, 11, 8, 43, 42, 518871, tzinfo=<DstTzInfo 'US/Eastern' EDT-1 day, 20:00:00 DST>)
+
+
+Maintainer
+----------
+
+* Lennart Regebro, regebro@gmail.com
+
+Contributors
+------------
+
+* Marc Van Olmen
+* Benjamen Meyer
+* Manuel Ebert
+* Xiaokun Zhu
+
+(Sorry if I forgot someone)
+
+License
+-------
+
+* CC0 1.0 Universal http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/lib/tzlocal/__init__.py b/lib/tzlocal/__init__.py
new file mode 100644
index 00000000..df7a66b9
--- /dev/null
+++ b/lib/tzlocal/__init__.py
@@ -0,0 +1,7 @@
+import sys
+if sys.platform == 'win32':
+    from tzlocal.win32 import get_localzone, reload_localzone
+elif 'darwin' in sys.platform:
+    from tzlocal.darwin import get_localzone, reload_localzone
+else:
+    from tzlocal.unix import get_localzone, reload_localzone
diff --git a/lib/tzlocal/darwin.py b/lib/tzlocal/darwin.py
new file mode 100644
index 00000000..86fd906f
--- /dev/null
+++ b/lib/tzlocal/darwin.py
@@ -0,0 +1,27 @@
+from __future__ import with_statement
+import os
+import pytz
+
+_cache_tz = None
+
+def _get_localzone():
+    tzname = os.popen("systemsetup -gettimezone").read().replace("Time Zone: ", "").strip()
+    if not tzname or tzname not in pytz.all_timezones_set:
+        # link will be something like /usr/share/zoneinfo/America/Los_Angeles.
+        link = os.readlink("/etc/localtime")
+        tzname = link[link.rfind("zoneinfo/") + 9:]
+    return pytz.timezone(tzname)
+
+def get_localzone():
+    """Get the computers configured local timezone, if any."""
+    global _cache_tz
+    if _cache_tz is None:
+        _cache_tz = _get_localzone()
+    return _cache_tz
+
+def reload_localzone():
+    """Reload the cached localzone. You need to call this if the timezone has changed."""
+    global _cache_tz
+    _cache_tz = _get_localzone()
+    return _cache_tz
+
diff --git a/lib/tzlocal/tests.py b/lib/tzlocal/tests.py
new file mode 100644
index 00000000..49dd0aef
--- /dev/null
+++ b/lib/tzlocal/tests.py
@@ -0,0 +1,64 @@
+import sys
+import os
+from datetime import datetime
+import unittest
+import pytz
+import tzlocal.unix
+
+class TzLocalTests(unittest.TestCase):
+
+    def test_env(self):
+        tz_harare = tzlocal.unix._tz_from_env(':Africa/Harare')
+        self.assertEqual(tz_harare.zone, 'Africa/Harare')
+
+        # Some Unices allow this as well, so we must allow it:
+        tz_harare = tzlocal.unix._tz_from_env('Africa/Harare')
+        self.assertEqual(tz_harare.zone, 'Africa/Harare')
+
+        local_path = os.path.split(__file__)[0]
+        tz_local = tzlocal.unix._tz_from_env(':' + os.path.join(local_path, 'test_data', 'Harare'))
+        self.assertEqual(tz_local.zone, 'local')
+        # Make sure the local timezone is the same as the Harare one above.
+        # We test this with a past date, so that we don't run into future changes
+        # of the Harare timezone.
+        dt = datetime(2012, 1, 1, 5)
+        self.assertEqual(tz_harare.localize(dt), tz_local.localize(dt))
+
+        # Non-zoneinfo timezones are not supported in the TZ environment.
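+        # For reference, a sketch of what the assertion below exercises
+        # (assuming a stock pytz install): 'GMT+03:00' is a POSIX-style TZ
+        # value, not an Olson/zoneinfo name, so pytz cannot resolve it.
+        #
+        #   >>> pytz.timezone('GMT+03:00')
+        #   Traceback (most recent call last):
+        #       ...
+        #   UnknownTimeZoneError: 'GMT+03:00'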
+        self.assertRaises(pytz.UnknownTimeZoneError, tzlocal.unix._tz_from_env, 'GMT+03:00')
+
+    def test_timezone(self):
+        # Most versions of Ubuntu
+        local_path = os.path.split(__file__)[0]
+        tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'timezone'))
+        self.assertEqual(tz.zone, 'Africa/Harare')
+
+    def test_zone_setting(self):
+        # A ZONE setting in /etc/sysconfig/clock, f ex CentOS
+        local_path = os.path.split(__file__)[0]
+        tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'zone_setting'))
+        self.assertEqual(tz.zone, 'Africa/Harare')
+
+    def test_timezone_setting(self):
+        # A ZONE setting in /etc/conf.d/clock, f ex Gentoo
+        local_path = os.path.split(__file__)[0]
+        tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'timezone_setting'))
+        self.assertEqual(tz.zone, 'Africa/Harare')
+
+    def test_only_localtime(self):
+        local_path = os.path.split(__file__)[0]
+        tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'localtime'))
+        self.assertEqual(tz.zone, 'local')
+        dt = datetime(2012, 1, 1, 5)
+        self.assertEqual(pytz.timezone('Africa/Harare').localize(dt), tz.localize(dt))
+
+if sys.platform == 'win32':
+
+    import tzlocal.win32
+    class TzWin32Tests(unittest.TestCase):
+
+        def test_win32(self):
+            tzlocal.win32.get_localzone()
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/lib/tzlocal/unix.py b/lib/tzlocal/unix.py
new file mode 100644
index 00000000..76c214dd
--- /dev/null
+++ b/lib/tzlocal/unix.py
@@ -0,0 +1,115 @@
+from __future__ import with_statement
+import os
+import re
+import pytz
+
+_cache_tz = None
+
+def _tz_from_env(tzenv):
+    if tzenv[0] == ':':
+        tzenv = tzenv[1:]
+
+    # TZ specifies a file
+    if os.path.exists(tzenv):
+        with open(tzenv, 'rb') as tzfile:
+            return pytz.tzfile.build_tzinfo('local', tzfile)
+
+    # TZ specifies a zoneinfo zone.
+    try:
+        tz = pytz.timezone(tzenv)
+        # That worked, so we return this:
+        return tz
+    except pytz.UnknownTimeZoneError:
+        raise pytz.UnknownTimeZoneError(
+            "tzlocal() does not support non-zoneinfo timezones like %s. \n"
+            "Please use a timezone in the form of Continent/City")
+
+def _get_localzone(_root='/'):
+    """Tries to find the local timezone configuration.
+
+    This method prefers finding the timezone name and passing that to pytz,
+    over passing in the localtime file, as in the latter case the zoneinfo
+    name is unknown.
+
+    The parameter _root makes the function look for files like /etc/localtime
+    beneath the _root directory. This is primarily used by the tests.
+    In normal usage you call the function without parameters."""
+
+    tzenv = os.environ.get('TZ')
+    if tzenv:
+        try:
+            return _tz_from_env(tzenv)
+        except pytz.UnknownTimeZoneError:
+            pass
+
+    # Now look for distribution specific configuration files
+    # that contain the timezone name.
+    tzpath = os.path.join(_root, 'etc/timezone')
+    if os.path.exists(tzpath):
+        with open(tzpath, 'rb') as tzfile:
+            data = tzfile.read()
+
+            # Issue #3 was that /etc/timezone was a zoneinfo file.
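+            # (For the check below: a compiled zoneinfo file begins with the
+            # magic bytes 'TZif' plus a version byte, 'TZif2' being the common
+            # version-2 form this heuristic looks for.)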
+            # That's a misconfiguration, but we need to handle it gracefully:
+            if data[:5] != 'TZif2':
+                etctz = data.strip().decode()
+                # Get rid of host definitions and comments:
+                if ' ' in etctz:
+                    etctz, dummy = etctz.split(' ', 1)
+                if '#' in etctz:
+                    etctz, dummy = etctz.split('#', 1)
+                return pytz.timezone(etctz.replace(' ', '_'))
+
+    # CentOS has a ZONE setting in /etc/sysconfig/clock,
+    # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and
+    # Gentoo has a TIMEZONE setting in /etc/conf.d/clock
+    # We look through these files for a timezone:
+
+    zone_re = re.compile('\s*ZONE\s*=\s*\"')
+    timezone_re = re.compile('\s*TIMEZONE\s*=\s*\"')
+    end_re = re.compile('\"')
+
+    for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'):
+        tzpath = os.path.join(_root, filename)
+        if not os.path.exists(tzpath):
+            continue
+        with open(tzpath, 'rt') as tzfile:
+            data = tzfile.readlines()
+
+        for line in data:
+            # Look for the ZONE= setting.
+            match = zone_re.match(line)
+            if match is None:
+                # No ZONE= setting. Look for the TIMEZONE= setting.
+                match = timezone_re.match(line)
+            if match is not None:
+                # Some setting existed
+                line = line[match.end():]
+                etctz = line[:end_re.search(line).start()]
+
+                # We found a timezone
+                return pytz.timezone(etctz.replace(' ', '_'))
+
+    # No explicit setting existed. Use localtime
+    for filename in ('etc/localtime', 'usr/local/etc/localtime'):
+        tzpath = os.path.join(_root, filename)
+
+        if not os.path.exists(tzpath):
+            continue
+        with open(tzpath, 'rb') as tzfile:
+            return pytz.tzfile.build_tzinfo('local', tzfile)
+
+    raise pytz.UnknownTimeZoneError('Can not find any timezone configuration')
+
+def get_localzone():
+    """Get the computers configured local timezone, if any."""
+    global _cache_tz
+    if _cache_tz is None:
+        _cache_tz = _get_localzone()
+    return _cache_tz
+
+def reload_localzone():
+    """Reload the cached localzone. You need to call this if the timezone has changed."""
+    global _cache_tz
+    _cache_tz = _get_localzone()
+    return _cache_tz
diff --git a/lib/tzlocal/win32.py b/lib/tzlocal/win32.py
new file mode 100644
index 00000000..63445cd7
--- /dev/null
+++ b/lib/tzlocal/win32.py
@@ -0,0 +1,93 @@
+try:
+    import _winreg as winreg
+except ImportError:
+    import winreg
+
+from tzlocal.windows_tz import win_tz
+import pytz
+
+_cache_tz = None
+
+def valuestodict(key):
+    """Convert a registry key's values to a dictionary."""
+    dict = {}
+    size = winreg.QueryInfoKey(key)[1]
+    for i in range(size):
+        data = winreg.EnumValue(key, i)
+        dict[data[0]] = data[1]
+    return dict
+
+def get_localzone_name():
+    # Windows is special. It has unique time zone names (in several
+    # meanings of the word) available, but unfortunately, they can be
+    # translated to the language of the operating system, so we need to
+    # do a backwards lookup, by going through all time zones and see which
+    # one matches.
+    handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
+
+    TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
+    localtz = winreg.OpenKey(handle, TZLOCALKEYNAME)
+    keyvalues = valuestodict(localtz)
+    localtz.Close()
+    if 'TimeZoneKeyName' in keyvalues:
+        # Windows 7 (and Vista?)
+
+        # For some reason this returns a string with loads of NUL bytes at
+        # least on some systems. I don't know if this is a bug somewhere, I
+        # just work around it.
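+        # For example (hypothetical value, assuming such NUL padding):
+        #   'Romance Standard Time\x00\x00\x00'.split('\x00', 1)[0]
+        #   returns 'Romance Standard Time'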
+ tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] + else: + # Windows 2000 or XP + + # This is the localized name: + tzwin = keyvalues['StandardName'] + + # Open the list of timezones to look up the real name: + TZKEYNAME = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" + tzkey = winreg.OpenKey(handle, TZKEYNAME) + + # Now, match this value to Time Zone information + tzkeyname = None + for i in range(winreg.QueryInfoKey(tzkey)[0]): + subkey = winreg.EnumKey(tzkey, i) + sub = winreg.OpenKey(tzkey, subkey) + data = valuestodict(sub) + sub.Close() + try: + if data['Std'] == tzwin: + tzkeyname = subkey + break + except KeyError: + # This timezone didn't have proper configuration. + # Ignore it. + pass + + tzkey.Close() + handle.Close() + + if tzkeyname is None: + raise LookupError('Can not find Windows timezone configuration') + + timezone = win_tz.get(tzkeyname) + if timezone is None: + # Nope, that didn't work. Try adding "Standard Time", + # it seems to work a lot of times: + timezone = win_tz.get(tzkeyname + " Standard Time") + + # Return what we have. + if timezone is None: + raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname) + + return timezone + +def get_localzone(): + """Returns the zoneinfo-based tzinfo object that matches the Windows-configured timezone.""" + global _cache_tz + if _cache_tz is None: + _cache_tz = pytz.timezone(get_localzone_name()) + return _cache_tz + +def reload_localzone(): + """Reload the cached localzone. You need to call this if the timezone has changed.""" + global _cache_tz + _cache_tz = pytz.timezone(get_localzone_name()) diff --git a/lib/tzlocal/windows_tz.py b/lib/tzlocal/windows_tz.py new file mode 100644 index 00000000..0790bb48 --- /dev/null +++ b/lib/tzlocal/windows_tz.py @@ -0,0 +1,542 @@ +# This file is autogenerated by the get_windows_info.py script +# Do not edit. +win_tz = {'AUS Central Standard Time': 'Australia/Darwin', + 'AUS Eastern Standard Time': 'Australia/Sydney', + 'Afghanistan Standard Time': 'Asia/Kabul', + 'Alaskan Standard Time': 'America/Anchorage', + 'Arab Standard Time': 'Asia/Riyadh', + 'Arabian Standard Time': 'Asia/Dubai', + 'Arabic Standard Time': 'Asia/Baghdad', + 'Argentina Standard Time': 'America/Buenos_Aires', + 'Atlantic Standard Time': 'America/Halifax', + 'Azerbaijan Standard Time': 'Asia/Baku', + 'Azores Standard Time': 'Atlantic/Azores', + 'Bahia Standard Time': 'America/Bahia', + 'Bangladesh Standard Time': 'Asia/Dhaka', + 'Belarus Standard Time': 'Europe/Minsk', + 'Canada Central Standard Time': 'America/Regina', + 'Cape Verde Standard Time': 'Atlantic/Cape_Verde', + 'Caucasus Standard Time': 'Asia/Yerevan', + 'Cen. Australia Standard Time': 'Australia/Adelaide', + 'Central America Standard Time': 'America/Guatemala', + 'Central Asia Standard Time': 'Asia/Almaty', + 'Central Brazilian Standard Time': 'America/Cuiaba', + 'Central Europe Standard Time': 'Europe/Budapest', + 'Central European Standard Time': 'Europe/Warsaw', + 'Central Pacific Standard Time': 'Pacific/Guadalcanal', + 'Central Standard Time': 'America/Chicago', + 'Central Standard Time (Mexico)': 'America/Mexico_City', + 'China Standard Time': 'Asia/Shanghai', + 'Dateline Standard Time': 'Etc/GMT+12', + 'E. Africa Standard Time': 'Africa/Nairobi', + 'E. Australia Standard Time': 'Australia/Brisbane', + 'E. 
South America Standard Time': 'America/Sao_Paulo', + 'Eastern Standard Time': 'America/New_York', + 'Egypt Standard Time': 'Africa/Cairo', + 'Ekaterinburg Standard Time': 'Asia/Yekaterinburg', + 'FLE Standard Time': 'Europe/Kiev', + 'Fiji Standard Time': 'Pacific/Fiji', + 'GMT Standard Time': 'Europe/London', + 'GTB Standard Time': 'Europe/Bucharest', + 'Georgian Standard Time': 'Asia/Tbilisi', + 'Greenland Standard Time': 'America/Godthab', + 'Greenwich Standard Time': 'Atlantic/Reykjavik', + 'Hawaiian Standard Time': 'Pacific/Honolulu', + 'India Standard Time': 'Asia/Calcutta', + 'Iran Standard Time': 'Asia/Tehran', + 'Israel Standard Time': 'Asia/Jerusalem', + 'Jordan Standard Time': 'Asia/Amman', + 'Kaliningrad Standard Time': 'Europe/Kaliningrad', + 'Korea Standard Time': 'Asia/Seoul', + 'Libya Standard Time': 'Africa/Tripoli', + 'Line Islands Standard Time': 'Pacific/Kiritimati', + 'Magadan Standard Time': 'Asia/Magadan', + 'Mauritius Standard Time': 'Indian/Mauritius', + 'Middle East Standard Time': 'Asia/Beirut', + 'Montevideo Standard Time': 'America/Montevideo', + 'Morocco Standard Time': 'Africa/Casablanca', + 'Mountain Standard Time': 'America/Denver', + 'Mountain Standard Time (Mexico)': 'America/Chihuahua', + 'Myanmar Standard Time': 'Asia/Rangoon', + 'N. Central Asia Standard Time': 'Asia/Novosibirsk', + 'Namibia Standard Time': 'Africa/Windhoek', + 'Nepal Standard Time': 'Asia/Katmandu', + 'New Zealand Standard Time': 'Pacific/Auckland', + 'Newfoundland Standard Time': 'America/St_Johns', + 'North Asia East Standard Time': 'Asia/Irkutsk', + 'North Asia Standard Time': 'Asia/Krasnoyarsk', + 'Pacific SA Standard Time': 'America/Santiago', + 'Pacific Standard Time': 'America/Los_Angeles', + 'Pacific Standard Time (Mexico)': 'America/Santa_Isabel', + 'Pakistan Standard Time': 'Asia/Karachi', + 'Paraguay Standard Time': 'America/Asuncion', + 'Romance Standard Time': 'Europe/Paris', + 'Russia Time Zone 10': 'Asia/Srednekolymsk', + 'Russia Time Zone 11': 'Asia/Kamchatka', + 'Russia Time Zone 3': 'Europe/Samara', + 'Russian Standard Time': 'Europe/Moscow', + 'SA Eastern Standard Time': 'America/Cayenne', + 'SA Pacific Standard Time': 'America/Bogota', + 'SA Western Standard Time': 'America/La_Paz', + 'SE Asia Standard Time': 'Asia/Bangkok', + 'Samoa Standard Time': 'Pacific/Apia', + 'Singapore Standard Time': 'Asia/Singapore', + 'South Africa Standard Time': 'Africa/Johannesburg', + 'Sri Lanka Standard Time': 'Asia/Colombo', + 'Syria Standard Time': 'Asia/Damascus', + 'Taipei Standard Time': 'Asia/Taipei', + 'Tasmania Standard Time': 'Australia/Hobart', + 'Tokyo Standard Time': 'Asia/Tokyo', + 'Tonga Standard Time': 'Pacific/Tongatapu', + 'Turkey Standard Time': 'Europe/Istanbul', + 'US Eastern Standard Time': 'America/Indianapolis', + 'US Mountain Standard Time': 'America/Phoenix', + 'UTC': 'Etc/GMT', + 'UTC+12': 'Etc/GMT-12', + 'UTC-02': 'Etc/GMT+2', + 'UTC-11': 'Etc/GMT+11', + 'Ulaanbaatar Standard Time': 'Asia/Ulaanbaatar', + 'Venezuela Standard Time': 'America/Caracas', + 'Vladivostok Standard Time': 'Asia/Vladivostok', + 'W. Australia Standard Time': 'Australia/Perth', + 'W. Central Africa Standard Time': 'Africa/Lagos', + 'W. 
Europe Standard Time': 'Europe/Berlin', + 'West Asia Standard Time': 'Asia/Tashkent', + 'West Pacific Standard Time': 'Pacific/Port_Moresby', + 'Yakutsk Standard Time': 'Asia/Yakutsk'} + +# Old name for the win_tz variable: +tz_names = win_tz + +tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', + 'Africa/Accra': 'Greenwich Standard Time', + 'Africa/Addis_Ababa': 'E. Africa Standard Time', + 'Africa/Algiers': 'W. Central Africa Standard Time', + 'Africa/Asmera': 'E. Africa Standard Time', + 'Africa/Bamako': 'Greenwich Standard Time', + 'Africa/Bangui': 'W. Central Africa Standard Time', + 'Africa/Banjul': 'Greenwich Standard Time', + 'Africa/Bissau': 'Greenwich Standard Time', + 'Africa/Blantyre': 'South Africa Standard Time', + 'Africa/Brazzaville': 'W. Central Africa Standard Time', + 'Africa/Bujumbura': 'South Africa Standard Time', + 'Africa/Cairo': 'Egypt Standard Time', + 'Africa/Casablanca': 'Morocco Standard Time', + 'Africa/Ceuta': 'Romance Standard Time', + 'Africa/Conakry': 'Greenwich Standard Time', + 'Africa/Dakar': 'Greenwich Standard Time', + 'Africa/Dar_es_Salaam': 'E. Africa Standard Time', + 'Africa/Djibouti': 'E. Africa Standard Time', + 'Africa/Douala': 'W. Central Africa Standard Time', + 'Africa/El_Aaiun': 'Morocco Standard Time', + 'Africa/Freetown': 'Greenwich Standard Time', + 'Africa/Gaborone': 'South Africa Standard Time', + 'Africa/Harare': 'South Africa Standard Time', + 'Africa/Johannesburg': 'South Africa Standard Time', + 'Africa/Juba': 'E. Africa Standard Time', + 'Africa/Kampala': 'E. Africa Standard Time', + 'Africa/Khartoum': 'E. Africa Standard Time', + 'Africa/Kigali': 'South Africa Standard Time', + 'Africa/Kinshasa': 'W. Central Africa Standard Time', + 'Africa/Lagos': 'W. Central Africa Standard Time', + 'Africa/Libreville': 'W. Central Africa Standard Time', + 'Africa/Lome': 'Greenwich Standard Time', + 'Africa/Luanda': 'W. Central Africa Standard Time', + 'Africa/Lubumbashi': 'South Africa Standard Time', + 'Africa/Lusaka': 'South Africa Standard Time', + 'Africa/Malabo': 'W. Central Africa Standard Time', + 'Africa/Maputo': 'South Africa Standard Time', + 'Africa/Maseru': 'South Africa Standard Time', + 'Africa/Mbabane': 'South Africa Standard Time', + 'Africa/Mogadishu': 'E. Africa Standard Time', + 'Africa/Monrovia': 'Greenwich Standard Time', + 'Africa/Nairobi': 'E. Africa Standard Time', + 'Africa/Ndjamena': 'W. Central Africa Standard Time', + 'Africa/Niamey': 'W. Central Africa Standard Time', + 'Africa/Nouakchott': 'Greenwich Standard Time', + 'Africa/Ouagadougou': 'Greenwich Standard Time', + 'Africa/Porto-Novo': 'W. Central Africa Standard Time', + 'Africa/Sao_Tome': 'Greenwich Standard Time', + 'Africa/Tripoli': 'Libya Standard Time', + 'Africa/Tunis': 'W. 
Central Africa Standard Time', + 'Africa/Windhoek': 'Namibia Standard Time', + 'America/Anchorage': 'Alaskan Standard Time', + 'America/Anguilla': 'SA Western Standard Time', + 'America/Antigua': 'SA Western Standard Time', + 'America/Araguaina': 'SA Eastern Standard Time', + 'America/Argentina/La_Rioja': 'Argentina Standard Time', + 'America/Argentina/Rio_Gallegos': 'Argentina Standard Time', + 'America/Argentina/Salta': 'Argentina Standard Time', + 'America/Argentina/San_Juan': 'Argentina Standard Time', + 'America/Argentina/San_Luis': 'Argentina Standard Time', + 'America/Argentina/Tucuman': 'Argentina Standard Time', + 'America/Argentina/Ushuaia': 'Argentina Standard Time', + 'America/Aruba': 'SA Western Standard Time', + 'America/Asuncion': 'Paraguay Standard Time', + 'America/Bahia': 'Bahia Standard Time', + 'America/Bahia_Banderas': 'Central Standard Time (Mexico)', + 'America/Barbados': 'SA Western Standard Time', + 'America/Belem': 'SA Eastern Standard Time', + 'America/Belize': 'Central America Standard Time', + 'America/Blanc-Sablon': 'SA Western Standard Time', + 'America/Boa_Vista': 'SA Western Standard Time', + 'America/Bogota': 'SA Pacific Standard Time', + 'America/Boise': 'Mountain Standard Time', + 'America/Buenos_Aires': 'Argentina Standard Time', + 'America/Cambridge_Bay': 'Mountain Standard Time', + 'America/Campo_Grande': 'Central Brazilian Standard Time', + 'America/Cancun': 'Central Standard Time (Mexico)', + 'America/Caracas': 'Venezuela Standard Time', + 'America/Catamarca': 'Argentina Standard Time', + 'America/Cayenne': 'SA Eastern Standard Time', + 'America/Cayman': 'SA Pacific Standard Time', + 'America/Chicago': 'Central Standard Time', + 'America/Chihuahua': 'Mountain Standard Time (Mexico)', + 'America/Coral_Harbour': 'SA Pacific Standard Time', + 'America/Cordoba': 'Argentina Standard Time', + 'America/Costa_Rica': 'Central America Standard Time', + 'America/Creston': 'US Mountain Standard Time', + 'America/Cuiaba': 'Central Brazilian Standard Time', + 'America/Curacao': 'SA Western Standard Time', + 'America/Danmarkshavn': 'UTC', + 'America/Dawson': 'Pacific Standard Time', + 'America/Dawson_Creek': 'US Mountain Standard Time', + 'America/Denver': 'Mountain Standard Time', + 'America/Detroit': 'Eastern Standard Time', + 'America/Dominica': 'SA Western Standard Time', + 'America/Edmonton': 'Mountain Standard Time', + 'America/Eirunepe': 'SA Pacific Standard Time', + 'America/El_Salvador': 'Central America Standard Time', + 'America/Fortaleza': 'SA Eastern Standard Time', + 'America/Glace_Bay': 'Atlantic Standard Time', + 'America/Godthab': 'Greenland Standard Time', + 'America/Goose_Bay': 'Atlantic Standard Time', + 'America/Grand_Turk': 'SA Western Standard Time', + 'America/Grenada': 'SA Western Standard Time', + 'America/Guadeloupe': 'SA Western Standard Time', + 'America/Guatemala': 'Central America Standard Time', + 'America/Guayaquil': 'SA Pacific Standard Time', + 'America/Guyana': 'SA Western Standard Time', + 'America/Halifax': 'Atlantic Standard Time', + 'America/Havana': 'Eastern Standard Time', + 'America/Hermosillo': 'US Mountain Standard Time', + 'America/Indiana/Knox': 'Central Standard Time', + 'America/Indiana/Marengo': 'US Eastern Standard Time', + 'America/Indiana/Petersburg': 'Eastern Standard Time', + 'America/Indiana/Tell_City': 'Central Standard Time', + 'America/Indiana/Vevay': 'US Eastern Standard Time', + 'America/Indiana/Vincennes': 'Eastern Standard Time', + 'America/Indiana/Winamac': 'Eastern Standard Time', + 
'America/Indianapolis': 'US Eastern Standard Time', + 'America/Inuvik': 'Mountain Standard Time', + 'America/Iqaluit': 'Eastern Standard Time', + 'America/Jamaica': 'SA Pacific Standard Time', + 'America/Jujuy': 'Argentina Standard Time', + 'America/Juneau': 'Alaskan Standard Time', + 'America/Kentucky/Monticello': 'Eastern Standard Time', + 'America/Kralendijk': 'SA Western Standard Time', + 'America/La_Paz': 'SA Western Standard Time', + 'America/Lima': 'SA Pacific Standard Time', + 'America/Los_Angeles': 'Pacific Standard Time', + 'America/Louisville': 'Eastern Standard Time', + 'America/Lower_Princes': 'SA Western Standard Time', + 'America/Maceio': 'SA Eastern Standard Time', + 'America/Managua': 'Central America Standard Time', + 'America/Manaus': 'SA Western Standard Time', + 'America/Marigot': 'SA Western Standard Time', + 'America/Martinique': 'SA Western Standard Time', + 'America/Matamoros': 'Central Standard Time', + 'America/Mazatlan': 'Mountain Standard Time (Mexico)', + 'America/Mendoza': 'Argentina Standard Time', + 'America/Menominee': 'Central Standard Time', + 'America/Merida': 'Central Standard Time (Mexico)', + 'America/Mexico_City': 'Central Standard Time (Mexico)', + 'America/Moncton': 'Atlantic Standard Time', + 'America/Monterrey': 'Central Standard Time (Mexico)', + 'America/Montevideo': 'Montevideo Standard Time', + 'America/Montreal': 'Eastern Standard Time', + 'America/Montserrat': 'SA Western Standard Time', + 'America/Nassau': 'Eastern Standard Time', + 'America/New_York': 'Eastern Standard Time', + 'America/Nipigon': 'Eastern Standard Time', + 'America/Nome': 'Alaskan Standard Time', + 'America/Noronha': 'UTC-02', + 'America/North_Dakota/Beulah': 'Central Standard Time', + 'America/North_Dakota/Center': 'Central Standard Time', + 'America/North_Dakota/New_Salem': 'Central Standard Time', + 'America/Ojinaga': 'Mountain Standard Time', + 'America/Panama': 'SA Pacific Standard Time', + 'America/Pangnirtung': 'Eastern Standard Time', + 'America/Paramaribo': 'SA Eastern Standard Time', + 'America/Phoenix': 'US Mountain Standard Time', + 'America/Port-au-Prince': 'Eastern Standard Time', + 'America/Port_of_Spain': 'SA Western Standard Time', + 'America/Porto_Velho': 'SA Western Standard Time', + 'America/Puerto_Rico': 'SA Western Standard Time', + 'America/Rainy_River': 'Central Standard Time', + 'America/Rankin_Inlet': 'Central Standard Time', + 'America/Recife': 'SA Eastern Standard Time', + 'America/Regina': 'Canada Central Standard Time', + 'America/Resolute': 'Central Standard Time', + 'America/Rio_Branco': 'SA Pacific Standard Time', + 'America/Santa_Isabel': 'Pacific Standard Time (Mexico)', + 'America/Santarem': 'SA Eastern Standard Time', + 'America/Santiago': 'Pacific SA Standard Time', + 'America/Santo_Domingo': 'SA Western Standard Time', + 'America/Sao_Paulo': 'E. 
South America Standard Time', + 'America/Scoresbysund': 'Azores Standard Time', + 'America/Sitka': 'Alaskan Standard Time', + 'America/St_Barthelemy': 'SA Western Standard Time', + 'America/St_Johns': 'Newfoundland Standard Time', + 'America/St_Kitts': 'SA Western Standard Time', + 'America/St_Lucia': 'SA Western Standard Time', + 'America/St_Thomas': 'SA Western Standard Time', + 'America/St_Vincent': 'SA Western Standard Time', + 'America/Swift_Current': 'Canada Central Standard Time', + 'America/Tegucigalpa': 'Central America Standard Time', + 'America/Thule': 'Atlantic Standard Time', + 'America/Thunder_Bay': 'Eastern Standard Time', + 'America/Tijuana': 'Pacific Standard Time', + 'America/Toronto': 'Eastern Standard Time', + 'America/Tortola': 'SA Western Standard Time', + 'America/Vancouver': 'Pacific Standard Time', + 'America/Whitehorse': 'Pacific Standard Time', + 'America/Winnipeg': 'Central Standard Time', + 'America/Yakutat': 'Alaskan Standard Time', + 'America/Yellowknife': 'Mountain Standard Time', + 'Antarctica/Casey': 'W. Australia Standard Time', + 'Antarctica/Davis': 'SE Asia Standard Time', + 'Antarctica/DumontDUrville': 'West Pacific Standard Time', + 'Antarctica/Macquarie': 'Central Pacific Standard Time', + 'Antarctica/Mawson': 'West Asia Standard Time', + 'Antarctica/McMurdo': 'New Zealand Standard Time', + 'Antarctica/Palmer': 'Pacific SA Standard Time', + 'Antarctica/Rothera': 'SA Eastern Standard Time', + 'Antarctica/Syowa': 'E. Africa Standard Time', + 'Antarctica/Vostok': 'Central Asia Standard Time', + 'Arctic/Longyearbyen': 'W. Europe Standard Time', + 'Asia/Aden': 'Arab Standard Time', + 'Asia/Almaty': 'Central Asia Standard Time', + 'Asia/Amman': 'Jordan Standard Time', + 'Asia/Anadyr': 'Russia Time Zone 11', + 'Asia/Aqtau': 'West Asia Standard Time', + 'Asia/Aqtobe': 'West Asia Standard Time', + 'Asia/Ashgabat': 'West Asia Standard Time', + 'Asia/Baghdad': 'Arabic Standard Time', + 'Asia/Bahrain': 'Arab Standard Time', + 'Asia/Baku': 'Azerbaijan Standard Time', + 'Asia/Bangkok': 'SE Asia Standard Time', + 'Asia/Beirut': 'Middle East Standard Time', + 'Asia/Bishkek': 'Central Asia Standard Time', + 'Asia/Brunei': 'Singapore Standard Time', + 'Asia/Calcutta': 'India Standard Time', + 'Asia/Chita': 'North Asia East Standard Time', + 'Asia/Choibalsan': 'Ulaanbaatar Standard Time', + 'Asia/Colombo': 'Sri Lanka Standard Time', + 'Asia/Damascus': 'Syria Standard Time', + 'Asia/Dhaka': 'Bangladesh Standard Time', + 'Asia/Dili': 'Tokyo Standard Time', + 'Asia/Dubai': 'Arabian Standard Time', + 'Asia/Dushanbe': 'West Asia Standard Time', + 'Asia/Hong_Kong': 'China Standard Time', + 'Asia/Hovd': 'SE Asia Standard Time', + 'Asia/Irkutsk': 'North Asia East Standard Time', + 'Asia/Jakarta': 'SE Asia Standard Time', + 'Asia/Jayapura': 'Tokyo Standard Time', + 'Asia/Jerusalem': 'Israel Standard Time', + 'Asia/Kabul': 'Afghanistan Standard Time', + 'Asia/Kamchatka': 'Russia Time Zone 11', + 'Asia/Karachi': 'Pakistan Standard Time', + 'Asia/Katmandu': 'Nepal Standard Time', + 'Asia/Khandyga': 'Yakutsk Standard Time', + 'Asia/Krasnoyarsk': 'North Asia Standard Time', + 'Asia/Kuala_Lumpur': 'Singapore Standard Time', + 'Asia/Kuching': 'Singapore Standard Time', + 'Asia/Kuwait': 'Arab Standard Time', + 'Asia/Macau': 'China Standard Time', + 'Asia/Magadan': 'Magadan Standard Time', + 'Asia/Makassar': 'Singapore Standard Time', + 'Asia/Manila': 'Singapore Standard Time', + 'Asia/Muscat': 'Arabian Standard Time', + 'Asia/Nicosia': 'GTB Standard Time', + 'Asia/Novokuznetsk': 'North 
Asia Standard Time', + 'Asia/Novosibirsk': 'N. Central Asia Standard Time', + 'Asia/Omsk': 'N. Central Asia Standard Time', + 'Asia/Oral': 'West Asia Standard Time', + 'Asia/Phnom_Penh': 'SE Asia Standard Time', + 'Asia/Pontianak': 'SE Asia Standard Time', + 'Asia/Pyongyang': 'Korea Standard Time', + 'Asia/Qatar': 'Arab Standard Time', + 'Asia/Qyzylorda': 'Central Asia Standard Time', + 'Asia/Rangoon': 'Myanmar Standard Time', + 'Asia/Riyadh': 'Arab Standard Time', + 'Asia/Saigon': 'SE Asia Standard Time', + 'Asia/Sakhalin': 'Vladivostok Standard Time', + 'Asia/Samarkand': 'West Asia Standard Time', + 'Asia/Seoul': 'Korea Standard Time', + 'Asia/Shanghai': 'China Standard Time', + 'Asia/Singapore': 'Singapore Standard Time', + 'Asia/Srednekolymsk': 'Russia Time Zone 10', + 'Asia/Taipei': 'Taipei Standard Time', + 'Asia/Tashkent': 'West Asia Standard Time', + 'Asia/Tbilisi': 'Georgian Standard Time', + 'Asia/Tehran': 'Iran Standard Time', + 'Asia/Thimphu': 'Bangladesh Standard Time', + 'Asia/Tokyo': 'Tokyo Standard Time', + 'Asia/Ulaanbaatar': 'Ulaanbaatar Standard Time', + 'Asia/Urumqi': 'Central Asia Standard Time', + 'Asia/Ust-Nera': 'Vladivostok Standard Time', + 'Asia/Vientiane': 'SE Asia Standard Time', + 'Asia/Vladivostok': 'Vladivostok Standard Time', + 'Asia/Yakutsk': 'Yakutsk Standard Time', + 'Asia/Yekaterinburg': 'Ekaterinburg Standard Time', + 'Asia/Yerevan': 'Caucasus Standard Time', + 'Atlantic/Azores': 'Azores Standard Time', + 'Atlantic/Bermuda': 'Atlantic Standard Time', + 'Atlantic/Canary': 'GMT Standard Time', + 'Atlantic/Cape_Verde': 'Cape Verde Standard Time', + 'Atlantic/Faeroe': 'GMT Standard Time', + 'Atlantic/Madeira': 'GMT Standard Time', + 'Atlantic/Reykjavik': 'Greenwich Standard Time', + 'Atlantic/South_Georgia': 'UTC-02', + 'Atlantic/St_Helena': 'Greenwich Standard Time', + 'Atlantic/Stanley': 'SA Eastern Standard Time', + 'Australia/Adelaide': 'Cen. Australia Standard Time', + 'Australia/Brisbane': 'E. Australia Standard Time', + 'Australia/Broken_Hill': 'Cen. Australia Standard Time', + 'Australia/Currie': 'Tasmania Standard Time', + 'Australia/Darwin': 'AUS Central Standard Time', + 'Australia/Hobart': 'Tasmania Standard Time', + 'Australia/Lindeman': 'E. Australia Standard Time', + 'Australia/Melbourne': 'AUS Eastern Standard Time', + 'Australia/Perth': 'W. Australia Standard Time', + 'Australia/Sydney': 'AUS Eastern Standard Time', + 'CST6CDT': 'Central Standard Time', + 'EST5EDT': 'Eastern Standard Time', + 'Etc/GMT': 'UTC', + 'Etc/GMT+1': 'Cape Verde Standard Time', + 'Etc/GMT+10': 'Hawaiian Standard Time', + 'Etc/GMT+11': 'UTC-11', + 'Etc/GMT+12': 'Dateline Standard Time', + 'Etc/GMT+2': 'UTC-02', + 'Etc/GMT+3': 'SA Eastern Standard Time', + 'Etc/GMT+4': 'SA Western Standard Time', + 'Etc/GMT+5': 'SA Pacific Standard Time', + 'Etc/GMT+6': 'Central America Standard Time', + 'Etc/GMT+7': 'US Mountain Standard Time', + 'Etc/GMT-1': 'W. Central Africa Standard Time', + 'Etc/GMT-10': 'West Pacific Standard Time', + 'Etc/GMT-11': 'Central Pacific Standard Time', + 'Etc/GMT-12': 'UTC+12', + 'Etc/GMT-13': 'Tonga Standard Time', + 'Etc/GMT-14': 'Line Islands Standard Time', + 'Etc/GMT-2': 'South Africa Standard Time', + 'Etc/GMT-3': 'E. Africa Standard Time', + 'Etc/GMT-4': 'Arabian Standard Time', + 'Etc/GMT-5': 'West Asia Standard Time', + 'Etc/GMT-6': 'Central Asia Standard Time', + 'Etc/GMT-7': 'SE Asia Standard Time', + 'Etc/GMT-8': 'Singapore Standard Time', + 'Etc/GMT-9': 'Tokyo Standard Time', + 'Etc/UTC': 'UTC', + 'Europe/Amsterdam': 'W. 
Europe Standard Time', + 'Europe/Andorra': 'W. Europe Standard Time', + 'Europe/Athens': 'GTB Standard Time', + 'Europe/Belgrade': 'Central Europe Standard Time', + 'Europe/Berlin': 'W. Europe Standard Time', + 'Europe/Bratislava': 'Central Europe Standard Time', + 'Europe/Brussels': 'Romance Standard Time', + 'Europe/Bucharest': 'GTB Standard Time', + 'Europe/Budapest': 'Central Europe Standard Time', + 'Europe/Busingen': 'W. Europe Standard Time', + 'Europe/Chisinau': 'GTB Standard Time', + 'Europe/Copenhagen': 'Romance Standard Time', + 'Europe/Dublin': 'GMT Standard Time', + 'Europe/Gibraltar': 'W. Europe Standard Time', + 'Europe/Guernsey': 'GMT Standard Time', + 'Europe/Helsinki': 'FLE Standard Time', + 'Europe/Isle_of_Man': 'GMT Standard Time', + 'Europe/Istanbul': 'Turkey Standard Time', + 'Europe/Jersey': 'GMT Standard Time', + 'Europe/Kaliningrad': 'Kaliningrad Standard Time', + 'Europe/Kiev': 'FLE Standard Time', + 'Europe/Lisbon': 'GMT Standard Time', + 'Europe/Ljubljana': 'Central Europe Standard Time', + 'Europe/London': 'GMT Standard Time', + 'Europe/Luxembourg': 'W. Europe Standard Time', + 'Europe/Madrid': 'Romance Standard Time', + 'Europe/Malta': 'W. Europe Standard Time', + 'Europe/Mariehamn': 'FLE Standard Time', + 'Europe/Minsk': 'Belarus Standard Time', + 'Europe/Monaco': 'W. Europe Standard Time', + 'Europe/Moscow': 'Russian Standard Time', + 'Europe/Oslo': 'W. Europe Standard Time', + 'Europe/Paris': 'Romance Standard Time', + 'Europe/Podgorica': 'Central Europe Standard Time', + 'Europe/Prague': 'Central Europe Standard Time', + 'Europe/Riga': 'FLE Standard Time', + 'Europe/Rome': 'W. Europe Standard Time', + 'Europe/Samara': 'Russia Time Zone 3', + 'Europe/San_Marino': 'W. Europe Standard Time', + 'Europe/Sarajevo': 'Central European Standard Time', + 'Europe/Simferopol': 'Russian Standard Time', + 'Europe/Skopje': 'Central European Standard Time', + 'Europe/Sofia': 'FLE Standard Time', + 'Europe/Stockholm': 'W. Europe Standard Time', + 'Europe/Tallinn': 'FLE Standard Time', + 'Europe/Tirane': 'Central Europe Standard Time', + 'Europe/Uzhgorod': 'FLE Standard Time', + 'Europe/Vaduz': 'W. Europe Standard Time', + 'Europe/Vatican': 'W. Europe Standard Time', + 'Europe/Vienna': 'W. Europe Standard Time', + 'Europe/Vilnius': 'FLE Standard Time', + 'Europe/Volgograd': 'Russian Standard Time', + 'Europe/Warsaw': 'Central European Standard Time', + 'Europe/Zagreb': 'Central European Standard Time', + 'Europe/Zaporozhye': 'FLE Standard Time', + 'Europe/Zurich': 'W. Europe Standard Time', + 'Indian/Antananarivo': 'E. Africa Standard Time', + 'Indian/Chagos': 'Central Asia Standard Time', + 'Indian/Christmas': 'SE Asia Standard Time', + 'Indian/Cocos': 'Myanmar Standard Time', + 'Indian/Comoro': 'E. Africa Standard Time', + 'Indian/Kerguelen': 'West Asia Standard Time', + 'Indian/Mahe': 'Mauritius Standard Time', + 'Indian/Maldives': 'West Asia Standard Time', + 'Indian/Mauritius': 'Mauritius Standard Time', + 'Indian/Mayotte': 'E. 
Africa Standard Time', + 'Indian/Reunion': 'Mauritius Standard Time', + 'MST7MDT': 'Mountain Standard Time', + 'PST8PDT': 'Pacific Standard Time', + 'Pacific/Apia': 'Samoa Standard Time', + 'Pacific/Auckland': 'New Zealand Standard Time', + 'Pacific/Efate': 'Central Pacific Standard Time', + 'Pacific/Enderbury': 'Tonga Standard Time', + 'Pacific/Fakaofo': 'Tonga Standard Time', + 'Pacific/Fiji': 'Fiji Standard Time', + 'Pacific/Funafuti': 'UTC+12', + 'Pacific/Galapagos': 'Central America Standard Time', + 'Pacific/Guadalcanal': 'Central Pacific Standard Time', + 'Pacific/Guam': 'West Pacific Standard Time', + 'Pacific/Honolulu': 'Hawaiian Standard Time', + 'Pacific/Johnston': 'Hawaiian Standard Time', + 'Pacific/Kiritimati': 'Line Islands Standard Time', + 'Pacific/Kosrae': 'Central Pacific Standard Time', + 'Pacific/Kwajalein': 'UTC+12', + 'Pacific/Majuro': 'UTC+12', + 'Pacific/Midway': 'UTC-11', + 'Pacific/Nauru': 'UTC+12', + 'Pacific/Niue': 'UTC-11', + 'Pacific/Noumea': 'Central Pacific Standard Time', + 'Pacific/Pago_Pago': 'UTC-11', + 'Pacific/Palau': 'Tokyo Standard Time', + 'Pacific/Ponape': 'Central Pacific Standard Time', + 'Pacific/Port_Moresby': 'West Pacific Standard Time', + 'Pacific/Rarotonga': 'Hawaiian Standard Time', + 'Pacific/Saipan': 'West Pacific Standard Time', + 'Pacific/Tahiti': 'Hawaiian Standard Time', + 'Pacific/Tarawa': 'UTC+12', + 'Pacific/Tongatapu': 'Tonga Standard Time', + 'Pacific/Truk': 'West Pacific Standard Time', + 'Pacific/Wake': 'UTC+12', + 'Pacific/Wallis': 'UTC+12'} diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py index 9bc57312..a55c696f 100755 --- a/mylar/PostProcessor.py +++ b/mylar/PostProcessor.py @@ -68,6 +68,8 @@ class PostProcessor(object): self.fileop = shutil.move self.valreturn = [] + self.extensions = ('.cbr', '.cbz', '.pdf') + self.failed_files = 0 self.log = '' def _log(self, message, level=logger.message): #level=logger.MESSAGE): @@ -165,10 +167,10 @@ class PostProcessor(object): def duplicate_process(self, dupeinfo): #path to move 'should' be the entire path to the given file - path_to_move = dupeinfo[0]['to_dupe'] + path_to_move = dupeinfo['to_dupe'] file_to_move = os.path.split(path_to_move)[1] - if dupeinfo[0]['action'] == 'dupe_src' and mylar.FILE_OPTS == 'move': + if dupeinfo['action'] == 'dupe_src' and mylar.FILE_OPTS == 'move': logger.info('[DUPLICATE-CLEANUP] New File will be post-processed. Moving duplicate [' + path_to_move + '] to Duplicate Dump Folder for manual intervention.') else: if mylar.FILE_OPTS == 'move': @@ -204,7 +206,7 @@ class PostProcessor(object): orig_folder = sub_path else: orig_folder = self.nzb_folder - + #make sure we don't delete the directory passed via manual-pp and ajust for trailling slashes or not if orig_folder.endswith('/') or orig_folder.endswith('\\'): tmp_folder = orig_folder[:-1] @@ -305,6 +307,8 @@ class PostProcessor(object): logger.fdebug(module + ' NZBGET Download folder option enabled. Directory set to : ' + self.nzb_folder) myDB = db.DBConnection() + self.oneoffinlist = False + if self.nzb_name == 'Manual Run': logger.fdebug (module + ' Manual Run initiated') #Manual postprocessing on a folder. @@ -313,7 +317,7 @@ class PostProcessor(object): filelist = flc.listFiles() if filelist['comiccount'] == 0: # is None: logger.warn('There were no files located - check the debugging logs if you think this is in error.') - return + return logger.info('I have located ' + str(filelist['comiccount']) + ' files that I should be able to post-process. 
Continuing...') #load the hashes for torrents so continual post-processing of same issues don't occur. @@ -490,7 +494,7 @@ class PostProcessor(object): issyr = None #logger.fdebug(module + ' issuedate:' + str(issuechk['IssueDate'])) #logger.fdebug(module + ' issuechk: ' + str(issuechk['IssueDate'][5:7])) - + #logger.info(module + ' ReleaseDate: ' + str(issuechk['ReleaseDate'])) #logger.info(module + ' IssueDate: ' + str(issuechk['IssueDate'])) if issuechk['ReleaseDate'] is not None and issuechk['ReleaseDate'] != '0000-00-00': @@ -550,7 +554,7 @@ class PostProcessor(object): datematch = "False" elif len(watchvals) == 1 and int(tmp_watchlist_vol) == 1: logger.fdebug(module + '[ISSUE-VERIFY][Lone Volume MATCH] Volume label of ' + str(watch_values['ComicVersion']) + ' indicates only volume for this series on your watchlist.') - elif int(tmp_watchlist_vol) > 1: + elif int(tmp_watchlist_vol) > 1: logger.fdebug(module + '[ISSUE-VERIFY][Lone Volume FAILURE] Volume label of ' + str(watch_values['ComicVersion']) + ' indicates that there is more than one volume for this series, but the one on your watchlist has no volume label set.') datematch = "False" @@ -558,7 +562,7 @@ class PostProcessor(object): #now we see if the issue year matches exactly to what we have within Mylar. if int(watch_issueyear) == int(watchmatch['issue_year']): logger.fdebug(module + '[ISSUE-VERIFY][Issue Year MATCH] Issue Year of ' + str(watch_issueyear) + ' is a match to the year found in the filename of : ' + str(watchmatch['issue_year'])) - datematch = 'True' + datematch = 'True' else: logger.fdebug(module + '[ISSUE-VERIFY][Issue Year FAILURE] Issue Year of ' + str(watch_issueyear) + ' does NOT match the year found in the filename of : ' + str(watchmatch['issue_year'])) logger.fdebug(module + '[ISSUE-VERIFY] Checking against complete date to see if month published could allow for different publication year.') @@ -578,7 +582,8 @@ class PostProcessor(object): "ComicID": cs['ComicID'], "IssueID": issuechk['IssueID'], "IssueNumber": issuechk['Issue_Number'], - "ComicName": cs['ComicName']}) + "ComicName": cs['ComicName'], + "One-Off": False}) else: logger.fdebug(module + '[NON-MATCH: ' + cs['ComicName'] + '-' + cs['ComicID'] + '] Incorrect series - not populating..continuing post-processing') continue @@ -592,7 +597,7 @@ class PostProcessor(object): #we should setup for manual post-processing of story-arc issues here #we can also search by ComicID to just grab those particular arcs as an alternative as well (not done) - + #as_d = filechecker.FileChecker() #as_dinfo = as_d.dynamic_replace(helpers.conversion(fl['series_name'])) #mod_seriesname = as_dinfo['mod_seriesname'] @@ -801,7 +806,7 @@ class PostProcessor(object): if metaresponse == "fail": logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') - elif metaresponse == "unrar error": + elif any([metaresponse == "unrar error", metaresponse == "corrupt"]): logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') continue #launch failed download handling here. 
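A minimal sketch of the response handling the hunk above converges on, assuming only the cmtagmylar response strings the patch itself names ("fail", "unrar error", "corrupt", "file not found||<file>", or the path of the rewritten .cbz). The classify_meta_response helper and its return labels are illustrative stand-ins, not part of Mylar; the point is that the patch's any([x == a, x == b]) test is a plain membership check over the known bad-archive markers.

BAD_ARCHIVE_RESPONSES = ('unrar error', 'corrupt')

def classify_meta_response(metaresponse):
    # any([x == a, x == b]) in the patch is the same test as tuple membership here
    if metaresponse == 'fail':
        return 'continue-untagged'   # tagging failed; post-process the file anyway
    elif metaresponse in BAD_ARCHIVE_RESPONSES:
        return 'mark-bad-and-retry'  # corrupt or incomplete archive; retry a different copy
    elif metaresponse.startswith('file not found'):
        # everything after '||' is the filename that could not be located
        return 'missing:' + metaresponse.split('||')[1]
    return 'tagged'                  # otherwise metaresponse is the path of the new .cbz

assert classify_meta_response('corrupt') == 'mark-bad-and-retry'
assert classify_meta_response('file not found||x.cbr') == 'missing:x.cbr'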
@@ -843,10 +848,10 @@ class PostProcessor(object): if renamed_file: dfilename = renamed_file['nfilename'] logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) - + #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename if mylar.READ2FILENAME: - + logger.fdebug(module + ' readingorder#: ' + str(ml['ReadingOrder'])) if int(ml['ReadingOrder']) < 10: readord = "00" + str(ml['ReadingOrder']) elif int(ml['ReadingOrder']) >= 10 and int(ml['ReadingOrder']) <= 99: readord = "0" + str(ml['ReadingOrder']) @@ -879,7 +884,7 @@ class PostProcessor(object): #IssArcID = 'S' + str(ml['IssueArcID']) myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', ['S' + str(ml['IssueArcID']),ml['StoryArc']]) myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', [ml['IssueArcID'],ml['StoryArc']]) - + logger.fdebug(module + ' IssueArcID: ' + str(ml['IssueArcID'])) ctrlVal = {"IssueArcID": ml['IssueArcID']} newVal = {"Status": "Downloaded", @@ -889,315 +894,402 @@ class PostProcessor(object): logger.fdebug(module + ' [' + ml['StoryArc'] + '] Post-Processing completed for: ' + grab_dst) - else: - nzbname = self.nzb_name - #remove extensions from nzb_name if they somehow got through (Experimental most likely) - extensions = ('.cbr', '.cbz') - - if nzbname.lower().endswith(extensions): - fd, ext = os.path.splitext(nzbname) - self._log("Removed extension from nzb: " + ext) - nzbname = re.sub(str(ext), '', str(nzbname)) - - #replace spaces - # let's change all space to decimals for simplicity - logger.fdebug('[NZBNAME]: ' + nzbname) - #gotta replace & or escape it - nzbname = re.sub("\&", 'and', nzbname) - nzbname = re.sub('[\,\:\?\'\+]', '', nzbname) - nzbname = re.sub('[\(\)]', ' ', nzbname) - logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname) - nzbname = re.sub('.cbr', '', nzbname).strip() - nzbname = re.sub('.cbz', '', nzbname).strip() - nzbname = re.sub('[\.\_]', ' ', nzbname).strip() - nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces. - logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname) - nzbname = re.sub('\s', '.', nzbname) - - logger.fdebug(module + ' After conversions, nzbname is : ' + str(nzbname)) -# if mylar.USE_NZBGET==1: -# nzbname=self.nzb_name - self._log("nzbname: " + str(nzbname)) - - nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() - - self.oneoff = False - if nzbiss is None: - self._log("Failure - could not initially locate nzbfile in my database to rename.") - logger.fdebug(module + ' Failure - could not locate nzbfile initially') - # if failed on spaces, change it all to decimals and try again. - nzbname = re.sub('[\(\)]', '', str(nzbname)) - self._log("trying again with this nzbname: " + str(nzbname)) - logger.fdebug(module + ' Trying to locate nzbfile again with nzbname of : ' + str(nzbname)) - nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() - if nzbiss is None: - logger.error(module + ' Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') - #set it up to run manual post-processing on self.nzb_folder - self._log('Unable to locate downloaded file within items I have snatched. 
Attempting to parse the filename directly and process.') - self.valreturn.append({"self.log": self.log, - "mode": 'outside'}) - return self.queue.put(self.valreturn) + else: + #one-off manual pp'd of torrents + oneofflist = myDB.select("select s.Issue_Number, s.ComicName, s.IssueID, s.ComicID, s.Provider, w.PUBLISHER, w.weeknumber, w.year from snatched as s inner join nzblog as n on s.IssueID = n.IssueID and s.Hash is not NULL inner join weekly as w on s.IssueID = w.IssueID WHERE (s.Provider ='32P' or s.Provider='TPSE' or s.Provider='WWT' or s.Provider='DEM') AND n.OneOff == 1;") + if oneofflist is None: + logger.fdebug(module + ' No one-off\'s have ever been snatched using Mylar.') else: - self._log("I corrected and found the nzb as : " + str(nzbname)) - logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname)) - #issueid = nzbiss['IssueID'] + oneoffvals = [] + oneoff_issuelist = [] + nm = 0 + for ofl in oneofflist: + oneoffvals.append({"ComicName": ofl['ComicName'], + "ComicPublisher": ofl['PUBLISHER'], + "Issue_Number": ofl['Issue_Number'], + "AlternateSearch": None, + "ComicID": ofl['ComicID'], + "IssueID": ofl['IssueID'], + "WatchValues": {"SeriesYear": None, + "LatestDate": None, + "ComicVersion": None, + "Publisher": ofl['PUBLISHER'], + "Total": None, + "ComicID": ofl['ComicID'], + "IsArc": False}}) - issueid = nzbiss['IssueID'] - logger.fdebug(module + ' Issueid: ' + str(issueid)) - sarc = nzbiss['SARC'] - self.oneoff = nzbiss['OneOff'] - tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone() + for fl in filelist['comiclist']: + #logger.info('fl: %s' % fl) + for ofv in oneoffvals: + #logger.info('ofv: %s' % ofv) + wm = filechecker.FileChecker(watchcomic=ofv['ComicName'], Publisher=ofv['ComicPublisher'], AlternateSearch=None, manual=ofv['WatchValues']) + watchmatch = wm.matchIT(fl) + if watchmatch['process_status'] == 'fail': + nm+=1 + continue + else: + temploc= watchmatch['justthedigits'].replace('_', ' ') + temploc = re.sub('[\#\']', '', temploc) - comicid = None - comicname = None - issuenumber = None - if tmpiss is not None: - comicid = tmpiss['ComicID'] - comicname = tmpiss['ComicName'] - issuenumber = tmpiss['Issue_Number'] - elif all([self.oneoff is not None, mylar.ALT_PULL == 2]): - oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone() - if oneinfo is not None: - comicid = oneinfo['ComicID'] - comicname = oneinfo['COMIC'] - issuenumber = oneinfo['ISSUE'] - publisher = oneinfo['PUBLISHER'] - self.oneoff = True - logger.info(module + ' Discovered %s # %s by %s [comicid:%s][issueid:%s]' % (comicname, issuenumber, publisher, comicid, issueid)) - #use issueid to get publisher, series, year, issue number + logger.info('watchmatch: %s' % watchmatch) + if 'annual' in temploc.lower(): + biannchk = re.sub('-', '', temploc.lower()).strip() + if 'biannual' in biannchk: + logger.fdebug(module + ' Bi-Annual detected.') + fcdigit = helpers.issuedigits(re.sub('biannual', '', str(biannchk)).strip()) + else: + fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip()) + logger.fdebug(module + ' Annual detected [' + str(fcdigit) +']. ComicID assigned as ' + str(ofv['ComicID'])) + annchk = "yes" + else: + fcdigit = helpers.issuedigits(temploc) - annchk = "no" -# if 'annual' in nzbname.lower(): -# logger.info(module + ' Annual detected.') -# annchk = "yes" -# issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? 
AND ComicName NOT NULL", [issueid]).fetchone() -# else: -# issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if fcdigit == helpers.issuedigits(ofv['Issue_Number']): + if watchmatch['sub']: + clocation = os.path.join(watchmatch['comiclocation'], watchmatch['sub'], helpers.conversion(watchmatch['comicfilename'])) + else: + clocation = os.path.join(watchmatch['comiclocation'],helpers.conversion(watchmatch['comicfilename'])) + oneoff_issuelist.append({"ComicLocation": clocation, + "ComicID": ofv['ComicID'], + "IssueID": ofv['IssueID'], + "IssueNumber": ofv['Issue_Number'], + "ComicName": ofv['ComicName'], + "One-Off": True}) + self.oneoffinlist = True + else: + logger.fdebug(module + ' No corresponding issue # in dB found for %s # %s' % (ofv['ComicName'],ofv['Issue_Number'])) + continue - issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() - if issuenzb is None: - logger.info(module + ' Could not detect as a standard issue - checking against annuals.') - issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() - if issuenzb is None: - logger.info(module + ' issuenzb not found.') - #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume - #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. - if 'S' in issueid: - sandwich = issueid - elif 'G' in issueid or '-' in issueid: - sandwich = 1 - elif any([self.oneoff is True, issueid >= '900000', issueid == '1']): - logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') - sandwich = None #arbitrarily set it to None just to force one-off downloading below. - else: - logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') + logger.fdebug(module + '[SUCCESSFUL MATCH: ' + ofv['ComicName'] + '-' + ofv['ComicID'] + '] Match verified for ' + helpers.conversion(fl['comicfilename'])) + break + + if any([self.nzb_name != 'Manual Run', self.oneoffinlist is True]): + ppinfo = [] + if self.oneoffinlist is False: + nzbname = self.nzb_name + #remove extensions from nzb_name if they somehow got through (Experimental most likely) + if nzbname.lower().endswith(self.extensions): + fd, ext = os.path.splitext(nzbname) + self._log("Removed extension from nzb: " + ext) + nzbname = re.sub(str(ext), '', str(nzbname)) + + #replace spaces + # let's change all space to decimals for simplicity + logger.fdebug('[NZBNAME]: ' + nzbname) + #gotta replace & or escape it + nzbname = re.sub("\&", 'and', nzbname) + nzbname = re.sub('[\,\:\?\'\+]', '', nzbname) + nzbname = re.sub('[\(\)]', ' ', nzbname) + logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname) + nzbname = re.sub('.cbr', '', nzbname).strip() + nzbname = re.sub('.cbz', '', nzbname).strip() + nzbname = re.sub('[\.\_]', ' ', nzbname).strip() + nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces. + logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname) + nzbname = re.sub('\s', '.', nzbname) + + logger.fdebug(module + ' After conversions, nzbname is : ' + str(nzbname)) +# if mylar.USE_NZBGET==1: +# nzbname=self.nzb_name + self._log("nzbname: " + str(nzbname)) + + nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? 
or altnzbname=?", [nzbname, nzbname]).fetchone() + + self.oneoff = False + if nzbiss is None: + self._log("Failure - could not initially locate nzbfile in my database to rename.") + logger.fdebug(module + ' Failure - could not locate nzbfile initially') + # if failed on spaces, change it all to decimals and try again. + nzbname = re.sub('[\(\)]', '', str(nzbname)) + self._log("trying again with this nzbname: " + str(nzbname)) + logger.fdebug(module + ' Trying to locate nzbfile again with nzbname of : ' + str(nzbname)) + nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() + if nzbiss is None: + logger.error(module + ' Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') + #set it up to run manual post-processing on self.nzb_folder self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') self.valreturn.append({"self.log": self.log, "mode": 'outside'}) return self.queue.put(self.valreturn) - else: - logger.info(module + ' Successfully located issue as an annual. Continuing.') - annchk = "yes" + else: + self._log("I corrected and found the nzb as : " + str(nzbname)) + logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname)) + #issueid = nzbiss['IssueID'] - if issuenzb is not None: - logger.info(module + ' issuenzb found.') - if helpers.is_number(issueid): - sandwich = int(issuenzb['IssueID']) -# else: -# logger.info(module + ' issuenzb not found.') -# #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume -# #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. -# if 'S' in issueid: -# sandwich = issueid -# elif 'G' in issueid or '-' in issueid: -# sandwich = 1 - if sandwich is not None and helpers.is_number(sandwich): - if sandwich < 900000: - # if sandwich is less than 900000 it's a normal watchlist download. Bypass. - pass + issueid = nzbiss['IssueID'] + logger.fdebug(module + ' Issueid: ' + str(issueid)) + sarc = nzbiss['SARC'] + self.oneoff = nzbiss['OneOff'] + tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone() + + comicid = None + comicname = None + issuenumber = None + if tmpiss is not None: + ppinfo.append({'comicid': tmpiss['ComicID'], + 'issueid': issueid, + 'comicname': tmpiss['ComicName'], + 'issuenumber': tmpiss['Issue_Number'], + 'publisher': None, + 'sarc': sarc, + 'oneoff': self.oneoff}) + + elif all([self.oneoff is not None, mylar.ALT_PULL == 2]): + oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone() + if oneinfo is not None: + ppinfo.append({'comicid': oneinfo['ComicID'], + 'comicname': oneinfo['COMIC'], + 'issuenumber': oneinfo['ISSUE'], + 'publisher': oneinfo['PUBLISHER'], + 'issueid': issueid, + 'sarc': None, + 'oneoff': True}) + + self.oneoff = True + #logger.info(module + ' Discovered %s # %s by %s [comicid:%s][issueid:%s]' % (comicname, issuenumber, publisher, comicid, issueid)) + #use issueid to get publisher, series, year, issue number else: - if any([self.oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: - # this has no issueID, therefore it's a one-off or a manual post-proc. - # At this point, let's just drop it into the Comic Location folder and forget about it.. 
- if sandwich is not None and 'S' in sandwich: - self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc)) - logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc)) - else: - self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") - logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') - self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) - grdst = mylar.GRABBAG_DIR + for x in oneoff_issuelist: + if x['One-Off'] is True: + oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [x['IssueID']]).fetchone() + if oneinfo is not None: + ppinfo.append({'comicid': oneinfo['ComicID'], + 'comicname': oneinfo['COMIC'], + 'issuenumber': oneinfo['ISSUE'], + 'publisher': oneinfo['PUBLISHER'], + 'issueid': x['IssueID'], + 'comiclocation': x['ComicLocation'], + 'sarc': None, + 'oneoff': x['One-Off']}) + self.oneoff = True - odir = None - ofilename = None - for root, dirnames, filenames in os.walk(self.nzb_folder): - for filename in filenames: - if filename.lower().endswith(extensions): - odir = root - ofilename = filename - path, ext = os.path.splitext(ofilename) + if len(ppinfo) > 0: + for pp in ppinfo: + logger.info('[PPINFO-POST-PROCESSING-ATTEMPT] %s' % pp) + self.nzb_or_oneoff_pp(tinfo=pp) - if ofilename is None: - logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') - self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) + #if len(manual_list) > 0: + #for ml in manual_list: + # logger.info('[MANUAL-POST-PROCESSING-ATTEMPT] %s' % ml) + #self.nzb_or_oneoff_pp(manual=manual_list) - if odir is None: - odir = self.nzb_folder - - if sandwich is not None and 'S' in sandwich: - issuearcid = re.sub('S', '', issueid) - logger.fdebug(module + ' issuearcid:' + str(issuearcid)) - arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() - if arcdata is None: - logger.warn(module + ' Unable to locate issue within Story Arcs. Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary') - self._log('Unable to locate issue within Story Arcs in orde to properly assign metadata. PostProcessing aborted.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) - - if arcdata['Publisher'] is None: - arcpub = arcdata['IssuePublisher'] - else: - arcpub = arcdata['Publisher'] - - grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) - - if comicid is None: - comicid = arcdata['ComicID'] - if comicname is None: - comicname = arcdata['ComicName'] - if issuenumber is None: - issuenumber = arcdata['IssueNumber'] - issueid = arcdata['IssueID'] - - #tag the meta. - metaresponse = None - - crcvalue = helpers.crc(os.path.join(self.nzb_folder, ofilename)) - - #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging. - #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. 
- if all([mylar.ENABLE_META, issueid is not None]): - self._log("Metatagging enabled - proceeding...") - try: - import cmtagmylar - metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) - except ImportError: - logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') - metaresponse = "fail" - - if metaresponse == "fail": - logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') - elif metaresponse == "unrar error": - logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') - #launch failed download handling here. - elif metaresponse.startswith('file not found'): - filename_in_error = os.path.split(metaresponse, '||')[1] - self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...") - logger.error(module + ' The file cannot be found in the location provided for metagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...') - else: - odir = os.path.split(metaresponse)[0] - ofilename = os.path.split(metaresponse)[1] - ext = os.path.splitext(metaresponse)[1] - logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') - self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') - - dfilename = ofilename - - if metaresponse: - src_location = odir - else: - src_location = self.nzb_folder - - grab_src = os.path.join(src_location, ofilename) - self._log("Source Path : " + grab_src) - logger.info(module + ' Source Path : ' + grab_src) - - checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) - if not checkdirectory: - logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) - - #send to renamer here if valid. 
- if mylar.RENAME_FILES: - renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) - if renamed_file: - dfilename = renamed_file['nfilename'] - logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) - - - if sandwich is not None and 'S' in sandwich: - #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename - if mylar.READ2FILENAME: - logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) - if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) - elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) - else: readord = str(arcdata['ReadingOrder']) - dfilename = str(readord) + "-" + dfilename - else: - dfilename = ofilename - grab_dst = os.path.join(grdst, dfilename) - else: - grab_dst = os.path.join(grdst, ofilename) - - self._log("Destination Path : " + grab_dst) - - logger.info(module + ' Destination Path : ' + grab_dst) - - logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst)) - - try: - fileoperation = helpers.file_ops(grab_src, grab_dst) - if not fileoperation: - raise OSError - except (OSError, IOError): - logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') - self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.") - return - - #tidyup old path - if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): - self.tidyup(src_location, True) - - #delete entry from nzblog table - myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) - - if sandwich is not None and 'S' in sandwich: - #issuearcid = re.sub('S', '', issueid) - logger.info(module + ' IssueArcID is : ' + str(issuearcid)) - ctrlVal = {"IssueArcID": issuearcid} - newVal = {"Status": "Downloaded", - "Location": grab_dst} - myDB.upsert("readinglist", newVal, ctrlVal) - logger.info(module + ' Updated status to Downloaded') - - logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) - self._log(u"Post Processing SUCCESSFUL! ") - elif self.oneoff is True: - logger.info(module + ' IssueID is : ' + str(issueid)) - ctrlVal = {"IssueID": issueid} - newVal = {"Status": "Downloaded"} - logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) - myDB.upsert("weekly", newVal, ctrlVal) - logger.info(module + ' Updated status to Downloaded') - myDB.upsert("oneoffhistory", newVal, ctrlVal) - logger.info(module + ' Updated history for one-off\'s for tracking purposes') - logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) - self._log(u"Post Processing SUCCESSFUL! ") - - try: - self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) - except: - pass - - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) +# annchk = "no" +# issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() +# if issuenzb is None: +# logger.info(module + ' Could not detect as a standard issue - checking against annuals.') +# issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? 
AND ComicName NOT NULL", [issueid]).fetchone() +# if issuenzb is None: +# logger.info(module + ' issuenzb not found.') +# #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume +# #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. +# if 'S' in issueid: +# sandwich = issueid +# elif 'G' in issueid or '-' in issueid: +# sandwich = 1 +# elif any([self.oneoff is True, issueid >= '900000', issueid == '1']): +# logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') +# sandwich = None #arbitrarily set it to None just to force one-off downloading below. +# else: +# logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') +# self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'outside'}) +# return self.queue.put(self.valreturn) +# else: +# logger.info(module + ' Successfully located issue as an annual. Continuing.') +# annchk = "yes" +# +# if issuenzb is not None: +# logger.info(module + ' issuenzb found.') +# if helpers.is_number(issueid): +# sandwich = int(issuenzb['IssueID']) +# if sandwich is not None and helpers.is_number(sandwich): +# if sandwich < 900000: +# # if sandwich is less than 900000 it's a normal watchlist download. Bypass. +# pass +# else: +# if any([self.oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: +# # this has no issueID, therefore it's a one-off or a manual post-proc. +# # At this point, let's just drop it into the Comic Location folder and forget about it.. +# if sandwich is not None and 'S' in sandwich: +# self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc)) +# logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc)) +# else: +# self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") +# logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') +# self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) +# grdst = mylar.GRABBAG_DIR +# +# odir = None +# ofilename = None +# for root, dirnames, filenames in os.walk(self.nzb_folder): +# for filename in filenames: +# if filename.lower().endswith(self.extensions): +# odir = root +# ofilename = filename +# path, ext = os.path.splitext(ofilename) +# +# if ofilename is None: +# logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') +# self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# if odir is None: +# odir = self.nzb_folder +# +# if sandwich is not None and 'S' in sandwich: +# issuearcid = re.sub('S', '', issueid) +# logger.fdebug(module + ' issuearcid:' + str(issuearcid)) +# arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() +# if arcdata is None: +# logger.warn(module + ' Unable to locate issue within Story Arcs. 
Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary') +# self._log('Unable to locate issue within Story Arcs in orde to properly assign metadata. PostProcessing aborted.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# if arcdata['Publisher'] is None: +# arcpub = arcdata['IssuePublisher'] +# else: +# arcpub = arcdata['Publisher'] +# +# grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) +# +# if comicid is None: +# comicid = arcdata['ComicID'] +# if comicname is None: +# comicname = arcdata['ComicName'] +# if issuenumber is None: +# issuenumber = arcdata['IssueNumber'] +# issueid = arcdata['IssueID'] +# +# #tag the meta. +# metaresponse = None +# +# crcvalue = helpers.crc(os.path.join(self.nzb_folder, ofilename)) +# +# #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging. +# #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. +# if all([mylar.ENABLE_META, issueid is not None]): +# self._log("Metatagging enabled - proceeding...") +# try: +# import cmtagmylar +# metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) +# except ImportError: +# logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') +# metaresponse = "fail" +# +# if metaresponse == "fail": +# logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') +# elif metaresponse == "unrar error": +# logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') +# #launch failed download handling here. +# elif metaresponse.startswith('file not found'): +# filename_in_error = os.path.split(metaresponse, '||')[1] +# self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...") +# logger.error(module + ' The file cannot be found in the location provided for metagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...') +# else: +# odir = os.path.split(metaresponse)[0] +# ofilename = os.path.split(metaresponse)[1] +# ext = os.path.splitext(metaresponse)[1] +# logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') +# self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') +# +# dfilename = ofilename +# +# if metaresponse: +# src_location = odir +# else: +# src_location = self.nzb_folder +# +# grab_src = os.path.join(src_location, ofilename) +# self._log("Source Path : " + grab_src) +# logger.info(module + ' Source Path : ' + grab_src) +# +# checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) +# if not checkdirectory: +# logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# #send to renamer here if valid. 
+# if mylar.RENAME_FILES: +# renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) +# if renamed_file: +# dfilename = renamed_file['nfilename'] +# logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) +# +# +# if sandwich is not None and 'S' in sandwich: +# #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename +# if mylar.READ2FILENAME: +# logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) +# if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) +# elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) +# else: readord = str(arcdata['ReadingOrder']) +# dfilename = str(readord) + "-" + dfilename +# else: +# dfilename = ofilename +# grab_dst = os.path.join(grdst, dfilename) +# else: +# grab_dst = os.path.join(grdst, ofilename) +# +# self._log("Destination Path : " + grab_dst) +# +# logger.info(module + ' Destination Path : ' + grab_dst) +# +# logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst)) +# +# try: +# fileoperation = helpers.file_ops(grab_src, grab_dst) +# if not fileoperation: +# raise OSError +# except (OSError, IOError): +# logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') +# self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.") +# return +# +# #tidyup old path +# if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): +# self.tidyup(src_location, True) +# +# #delete entry from nzblog table +# myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) +# +# if sandwich is not None and 'S' in sandwich: +# #issuearcid = re.sub('S', '', issueid) +# logger.info(module + ' IssueArcID is : ' + str(issuearcid)) +# ctrlVal = {"IssueArcID": issuearcid} +# newVal = {"Status": "Downloaded", +# "Location": grab_dst} +# myDB.upsert("readinglist", newVal, ctrlVal) +# logger.info(module + ' Updated status to Downloaded') +# +# logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) +# self._log(u"Post Processing SUCCESSFUL! ") +# elif self.oneoff is True: +# logger.info(module + ' IssueID is : ' + str(issueid)) +# ctrlVal = {"IssueID": issueid} +# newVal = {"Status": "Downloaded"} +# logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) +# myDB.upsert("weekly", newVal, ctrlVal) +# logger.info(module + ' Updated status to Downloaded') +# myDB.upsert("oneoffhistory", newVal, ctrlVal) +# logger.info(module + ' Updated history for one-off\'s for tracking purposes') +# logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) +# self._log(u"Post Processing SUCCESSFUL! 
") +# +# try: +# self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) +# except: +# pass +# +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) if self.nzb_name == 'Manual Run': @@ -1210,8 +1302,9 @@ class PostProcessor(object): return elif len(manual_arclist) > 0: logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') - + i = 0 + logger.info('manual list: %s' % manual_list) for ml in manual_list: i+=1 comicid = ml['ComicID'] @@ -1232,7 +1325,7 @@ class PostProcessor(object): break dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid) - if dupthis[0]['action'] == 'dupe_src' or dupthis[0]['action'] == 'dupe_file': + if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. #'dupe_file' - do not write new file as existing file is better quality #'dupe_src' - write new file, as existing file is a lesser quality (dupe) @@ -1242,50 +1335,362 @@ class PostProcessor(object): logger.warn('Unable to move duplicate file - skipping post-processing of this file.') continue - - if dupthis[0]['action'] == "write" or dupthis[0]['action'] == 'dupe_src': + if any([dupthis['action'] == "write", dupthis['action'] == 'dupe_src']): stat = ' [' + str(i) + '/' + str(len(manual_list)) + ']' self.Process_next(comicid, issueid, issuenumOG, ml, stat) dupthis = None - logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.') + if self.failed_files == 0: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.') + else: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues [FAILED: ' + str(self.failed_files) + ']') return else: - comicid = issuenzb['ComicID'] - issuenumOG = issuenzb['Issue_Number'] - #the self.nzb_folder should contain only the existing filename - dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid) - if dupthis[0]['action'] == 'dupe_src' or dupthis[0]['action'] == 'dupe_file': + pass + # comicid = issuenzb['ComicID'] + # issuenumOG = issuenzb['Issue_Number'] + # #the self.nzb_folder should contain only the existing filename + # dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid) + # if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': + # #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. 
+ # #'dupe_file' - do not write new file as existing file is better quality + # #'dupe_src' - write new file, as existing file is a lesser quality (dupe) + # if mylar.DUPLICATE_DUMP: + # if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): + # dupchkit = self.duplicate_process(dupthis) + # if dupchkit == False: + # logger.warn('Unable to move duplicate file - skipping post-processing of this file.') + # self.valreturn.append({"self.log": self.log, + # "mode": 'stop', + # "issueid": issueid, + # "comicid": comicid}) + # return self.queue.put(self.valreturn) + # + # if dupthis['action'] == "write" or dupthis['action'] == 'dupe_src': + # return self.Process_next(comicid, issueid, issuenumOG) + # else: + # self.valreturn.append({"self.log": self.log, + # "mode": 'stop', + # "issueid": issueid, + # "comicid": comicid}) + # return self.queue.put(self.valreturn) + + def nzb_or_oneoff_pp(self, tinfo=None, manual=None): + module = self.module + myDB = db.DBConnection() + if manual is None: + issueid = tinfo['issueid'] + comicid = tinfo['comicid'] + comicname = tinfo['comicname'] + issuenumber = tinfo['issuenumber'] + publisher = tinfo['publisher'] + sarc = tinfo['sarc'] + oneoff = tinfo['oneoff'] + if oneoff is True: + location = os.path.abspath(os.path.join(tinfo['comiclocation'], os.pardir)) + else: + location = self.nzb_folder + annchk = "no" + issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if issuenzb is None: + logger.info(module + ' Could not detect as a standard issue - checking against annuals.') + issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if issuenzb is None: + logger.info(module + ' issuenzb not found.') + #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume + #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. + if 'S' in issueid: + sandwich = issueid + elif 'G' in issueid or '-' in issueid: + sandwich = 1 + elif any([oneoff is True, issueid >= '900000', issueid == '1']): + logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') + sandwich = None #arbitrarily set it to None just to force one-off downloading below. + else: + logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') + self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') + self.valreturn.append({"self.log": self.log, + "mode": 'outside'}) + return self.queue.put(self.valreturn) + else: + logger.info(module + ' Successfully located issue as an annual. Continuing.') + annchk = "yes" + + if issuenzb is not None: + logger.info(module + ' issuenzb found.') + if helpers.is_number(issueid): + sandwich = int(issuenzb['IssueID']) + if sandwich is not None and helpers.is_number(sandwich): + if sandwich < 900000: + # if sandwich is less than 900000 it's a normal watchlist download. Bypass. + pass + else: + if any([oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: + # this has no issueID, therefore it's a one-off or a manual post-proc. + # At this point, let's just drop it into the Comic Location folder and forget about it.. 
+ if sandwich is not None and 'S' in sandwich: + self._log("One-off STORYARC mode enabled for Post-Processing for " + sarc) + logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + sarc) + else: + self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") + logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') + self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) + grdst = mylar.GRABBAG_DIR + + odir = location + ofilename = tinfo['comiclocation'] + + if ofilename is None: + logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') + self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + path, ext = os.path.splitext(ofilename) + + if odir is None: + odir = self.nzb_folder + + if sandwich is not None and 'S' in sandwich: + issuearcid = re.sub('S', '', issueid) + logger.fdebug(module + ' issuearcid:' + str(issuearcid)) + arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() + if arcdata is None: + logger.warn(module + ' Unable to locate issue within Story Arcs. Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary.') + self._log('Unable to locate issue within Story Arcs in order to properly assign metadata. PostProcessing aborted.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + if arcdata['Publisher'] is None: + arcpub = arcdata['IssuePublisher'] + else: + arcpub = arcdata['Publisher'] + + grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) + + if comicid is None: + comicid = arcdata['ComicID'] + if comicname is None: + comicname = arcdata['ComicName'] + if issuenumber is None: + issuenumber = arcdata['IssueNumber'] + issueid = arcdata['IssueID'] + + #tag the meta. + metaresponse = None + crcvalue = helpers.crc(os.path.join(location, ofilename)) + + #a one-off download from the pull-list will not have an issueid associated with it, and conversion/tagging will fail. + #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. + if all([mylar.ENABLE_META, issueid is not None]): + self._log("Metatagging enabled - proceeding...") + try: + import cmtagmylar + metaresponse = cmtagmylar.run(location, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) + except ImportError: + logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') + metaresponse = "fail" + + if metaresponse == "fail": + logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') + elif any([metaresponse == "unrar error", metaresponse == "corrupt"]): + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') + #launch failed download handling here. + elif metaresponse.startswith('file not found'): + filename_in_error = metaresponse.split('||')[1] + self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. 
Please verify it exists, and re-run if necessary.") + logger.error(module + ' The file cannot be found in the location provided for metatagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary.') + else: + odir = os.path.split(metaresponse)[0] + ofilename = os.path.split(metaresponse)[1] + ext = os.path.splitext(metaresponse)[1] + logger.info(module + ' Successfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') + self._log('Successfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') + + dfilename = ofilename + if metaresponse: + src_location = odir + else: + src_location = location + + grab_src = os.path.join(src_location, ofilename) + self._log("Source Path : " + grab_src) + logger.info(module + ' Source Path : ' + grab_src) + + checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) + if not checkdirectory: + logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + #send to renamer here if valid. + if mylar.RENAME_FILES: + renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) + if renamed_file: + dfilename = renamed_file['nfilename'] + logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) + + if sandwich is not None and 'S' in sandwich: + #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename + if mylar.READ2FILENAME: + logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) + if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) + elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) + else: readord = str(arcdata['ReadingOrder']) + dfilename = str(readord) + "-" + dfilename + else: + dfilename = ofilename + grab_dst = os.path.join(grdst, dfilename) + else: + grab_dst = os.path.join(grdst, ofilename) + + self._log("Destination Path : " + grab_dst) + + logger.info(module + ' Destination Path : ' + grab_dst) + logger.info(module + '[' + mylar.FILE_OPTS + '] ' + ofilename + ' into directory : ' + grab_dst) + + try: + fileoperation = helpers.file_ops(grab_src, grab_dst) + if not fileoperation: + raise OSError + except (OSError, IOError): + logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + grab_src + ' - check directories and manually re-run.') + self._log("Failed to " + mylar.FILE_OPTS + " " + grab_src + " - check directories and manually re-run.") + return + + #tidyup old path + if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): + self.tidyup(src_location, True) + + #delete entry from nzblog table + myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) + + if sandwich is not None and 'S' in sandwich: + logger.info(module + ' IssueArcID is : ' + str(issuearcid)) + ctrlVal = {"IssueArcID": issuearcid} + newVal = {"Status": "Downloaded", + "Location": grab_dst} + myDB.upsert("readinglist", newVal, ctrlVal) + logger.info(module + ' Updated status to Downloaded') + + logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) + self._log(u"Post Processing SUCCESSFUL! 
") + elif oneoff is True: + logger.info(module + ' IssueID is : ' + str(issueid)) + ctrlVal = {"IssueID": issueid} + newVal = {"Status": "Downloaded"} + logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) + myDB.upsert("weekly", newVal, ctrlVal) + logger.info(module + ' Updated status to Downloaded') + myDB.upsert("oneoffhistory", newVal, ctrlVal) + logger.info(module + ' Updated history for one-off\'s for tracking purposes') + logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) + self._log(u"Post Processing SUCCESSFUL! ") + + try: + self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) + except: + pass + + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + + return self.queue.put(self.valreturn) + + else: + manual_list = manual + + if self.nzb_name == 'Manual Run': + #loop through the hits here. + if len(manual_list) == 0 and len(manual_arclist) == 0: + logger.info(module + ' No matches for Manual Run ... exiting.') + return + elif len(manual_arclist) > 0 and len(manual_list) == 0: + logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') + return + elif len(manual_arclist) > 0: + logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') + i = 0 + logger.info('manual list: %s' % manual_list) + for ml in manual_list: + i+=1 + comicid = ml['ComicID'] + issueid = ml['IssueID'] + issuenumOG = ml['IssueNumber'] + #check to see if file is still being written to. + while True: + waiting = False + try: + ctime = max(os.path.getctime(ml['ComicLocation']), os.path.getmtime(ml['ComicLocation'])) + if time.time() > ctime > time.time() - 10: + time.sleep(max(time.time() - ctime, 0)) + waiting = True + else: + break + except: + #file is no longer present in location / can't be accessed. + break + + dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid) + if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': + #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. + #'dupe_file' - do not write new file as existing file is better quality #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. 
+
+                dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid)
+                if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file':
+                    #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
+                    #'dupe_file' - do not write new file as existing file is better quality
                    #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
                    #'dupe_file' - do not write new file as existing file is better quality
                    #'dupe_src' - write new file, as existing file is a lesser quality (dupe)
-                    if mylar.DUPLICATE_DUMP:
-                        if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']):
-                            dupchkit = self.duplicate_process(dupthis)
-                            if dupchkit == False:
-                                logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
-                                self.valreturn.append({"self.log": self.log,
-                                                       "mode": 'stop',
-                                                       "issueid": issueid,
-                                                       "comicid": comicid})
+                    if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP
+                        dupchkit = self.duplicate_process(dupthis)
+                        if dupchkit == False:
+                            logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
+                            continue
-                                return self.queue.put(self.valreturn)
-
-            if dupthis[0]['action'] == "write" or dupthis[0]['action'] == 'dupe_src':
-                return self.Process_next(comicid, issueid, issuenumOG)
-            else:
-                self.valreturn.append({"self.log": self.log,
-                                       "mode": 'stop',
-                                       "issueid": issueid,
-                                       "comicid": comicid})
+                if any([dupthis['action'] == "write", dupthis['action'] == 'dupe_src']):
+                    stat = ' [' + str(i) + '/' + str(len(manual_list)) + ']'
+                    self.Process_next(comicid, issueid, issuenumOG, ml, stat)
+                    dupthis = None
+
+            if self.failed_files == 0:
+                logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.')
+            else:
+                logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues [FAILED: ' + str(self.failed_files) + ']')
+            return
+
+        else:
+            comicid = issuenzb['ComicID']
+            issuenumOG = issuenzb['Issue_Number']
+            #the self.nzb_folder should contain only the existing filename
+            dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid)
+            if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file':
+                #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention.
+                #'dupe_file' - do not write new file as existing file is better quality
+                #'dupe_src' - write new file, as existing file is a lesser quality (dupe)
+                if mylar.DUPLICATE_DUMP:
+                    if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']):
+                        dupchkit = self.duplicate_process(dupthis)
+                        if dupchkit == False:
+                            logger.warn('Unable to move duplicate file - skipping post-processing of this file.')
+                            self.valreturn.append({"self.log": self.log,
+                                                   "mode": 'stop',
+                                                   "issueid": issueid,
+                                                   "comicid": comicid})
+                            return self.queue.put(self.valreturn)
+
+            if dupthis['action'] == "write" or dupthis['action'] == 'dupe_src':
+                return self.Process_next(comicid, issueid, issuenumOG)
+            else:
+                self.valreturn.append({"self.log": self.log,
+                                       "mode": 'stop',
+                                       "issueid": issueid,
+                                       "comicid": comicid})
+                return self.queue.put(self.valreturn)
-
-                return self.queue.put(self.valreturn)
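
duplicate_filecheck() now hands back a single dict rather than a one-element list (the matching helpers.py change appears further down in this diff), so callers branch on dupthis['action'] directly instead of dupthis[0]['action']. The contract, sketched with hypothetical handler names:

    def handle_dupe_result(dupthis):
        # 'write'     -> no duplicate found: post-process the new file
        # 'dupe_src'  -> new file wins; existing copy is the duplicate
        # 'dupe_file' -> existing file wins; new copy is the duplicate
        # 'dont_dupe' -> manually archived entry: leave everything alone
        action = dupthis['action']
        if action in ('write', 'dupe_src'):
            return 'post-process'
        if action == 'dupe_file':
            return 'divert-to-duplicate-dump'
        return 'skip'
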
and comicid=?", [issueid, comicid]).fetchone() annchk = "yes" @@ -1577,7 +1981,7 @@ class PostProcessor(object): ofilename = None for root, dirnames, filenames in os.walk(self.nzb_folder, followlinks=True): for filename in filenames: - if filename.lower().endswith(extensions): + if filename.lower().endswith(self.extensions): odir = root logger.fdebug(module + ' odir (root): ' + odir) ofilename = filename @@ -1594,6 +1998,7 @@ class PostProcessor(object): if ofilename is None: self._log("Unable to locate a valid cbr/cbz file. Aborting post-processing for this filename.") logger.error(module + ' unable to locate a valid cbr/cbz file. Aborting post-processing for this filename.') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1641,23 +2046,32 @@ class PostProcessor(object): if pcheck == "fail": self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...") logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...') + self.failed_files +=1 #we need to set this to the cbz file since not doing it will result in nothing getting moved. #not sure how to do this atm - elif pcheck == "unrar error": - self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.") - logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying a different copy.') - self.valreturn.append({"self.log": self.log, - "mode": 'fail', - "issueid": issueid, - "comicid": comicid, - "comicname": comicnzb['ComicName'], - "issuenumber": issuenzb['Issue_Number'], - "annchk": annchk}) + elif any([pcheck == "unrar error", pcheck == "corrupt"]): + if ml is not None: + self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and not post-processing.") + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and not post-processing.') + self.failed_files +=1 + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + else: + self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.") + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying a different copy.') + self.valreturn.append({"self.log": self.log, + "mode": 'fail', + "issueid": issueid, + "comicid": comicid, + "comicname": comicnzb['ComicName'], + "issuenumber": issuenzb['Issue_Number'], + "annchk": annchk}) return self.queue.put(self.valreturn) elif pcheck.startswith('file not found'): filename_in_error = os.path.split(pcheck, '||')[1] self._log("The file cannot be found in the location provided [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Aborting.") logger.error(module + ' The file cannot be found in the location provided [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. 
@@ -1669,7 +2083,7 @@ class PostProcessor(object):
                ext = os.path.splitext(ofilename)[1]
                self._log("Sucessfully wrote metadata to .cbz - Continuing..")
                logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..')
-            #if this is successful, and we're copying to dst then set the file op to move this cbz so we 
+            #if this is successful, and we're copying to dst then set the file op to move this cbz so we
            #don't leave a cbr/cbz in the origianl directory.
            #self.fileop = shutil.move
        #Run Pre-script
@@ -1715,7 +2129,7 @@ class PostProcessor(object):
#            ofilename = None
#            for root, dirnames, filenames in os.walk(self.nzb_folder, followlinks=True):
#                for filename in filenames:
-#                    if filename.lower().endswith(extensions):
+#                    if filename.lower().endswith(self.extensions):
#                        odir = root
#                        logger.fdebug(module + ' odir (root): ' + odir)
#                        ofilename = filename
@@ -1752,6 +2166,7 @@ class PostProcessor(object):
            if any([ofilename == odir, ofilename == odir[:-1], ofilename == '']):
                self._log("There was a problem deciphering the filename/directory - please verify that the filename : [" + ofilename + "] exists in location [" + odir + "]. Aborting.")
                logger.error(module + ' There was a problem deciphering the filename/directory - please verify that the filename : [' + ofilename + '] exists in location [' + odir + ']. Aborting.')
+                self.failed_files +=1
                self.valreturn.append({"self.log": self.log,
                                       "mode": 'stop'})
                return self.queue.put(self.valreturn)
@@ -1762,6 +2177,7 @@ class PostProcessor(object):
            if ofilename is None or ofilename == '':
                logger.error(module + ' Aborting PostProcessing - the filename does not exist in the location given. Make sure that ' + self.nzb_folder + ' exists and is the correct location.')
+                self.failed_files +=1
                self.valreturn.append({"self.log": self.log,
                                       "mode": 'stop'})
                return self.queue.put(self.valreturn)
@@ -1774,7 +2190,7 @@ class PostProcessor(object):
                self._log("Rename Files isn't enabled...keeping original filename.")
                logger.fdebug(module + ' Rename Files is not enabled - keeping original filename.')
                #check if extension is in nzb_name - will screw up otherwise
-                if ofilename.lower().endswith(extensions):
+                if ofilename.lower().endswith(self.extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
@@ -1793,6 +2209,7 @@ class PostProcessor(object):
            checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True, module=module)
            if not checkdirectory:
                logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.')
+                self.failed_files +=1
                self.valreturn.append({"self.log": self.log,
                                       "mode": 'stop'})
                return self.queue.put(self.valreturn)
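
Several hunks here swap the local extensions tuple for self.extensions set once on the instance; both forms work because str.endswith() accepts a tuple of suffixes, which is what lets a single call cover every supported archive type:

    extensions = ('.cbr', '.cbz', '.pdf')

    # endswith() with a tuple tests all suffixes at once; lower() makes
    # the comparison case-insensitive for names like 'X-Men 008.CBZ'.
    for name in ('X-Men 008.CBZ', 'notes.txt', 'X-Men 009.cbr'):
        print('%s -> %s' % (name, name.lower().endswith(extensions)))
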
@@ -1867,7 +2284,7 @@ class PostProcessor(object):
            except (OSError, IOError):
                logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.')
                logger.fdebug(module + ' Post-Processing ABORTED.')
-
+                self.failed_files +=1
                self.valreturn.append({"self.log": self.log,
                                       "mode": 'stop'})
                return self.queue.put(self.valreturn)
@@ -1948,7 +2365,7 @@ class PostProcessor(object):
                logger.fdebug(module + ' Destination Path : ' + grab_dst)
                grab_src = dst
-                logger.fdebug(module + ' Source Path : ' + grab_src) 
+                logger.fdebug(module + ' Source Path : ' + grab_src)
                logger.info(module + '[' + mylar.ARC_FILEOPS.upper() + '] ' + str(dst) + ' into directory : ' + str(grab_dst))
                try:
@@ -2032,7 +2449,7 @@ class PostProcessor(object):
    def sendnotify(self, series, issueyear, issuenumOG, annchk, module):
-
+
        if annchk == "no":
            if issueyear is None:
                prline = series + ' - issue #' + issuenumOG
@@ -2076,7 +2493,7 @@ class PostProcessor(object):
        if mylar.TELEGRAM_ENABLED:
            telegram = notifiers.TELEGRAM()
            telegram.notify(prline, prline2)
-
+
        if mylar.SLACK_ENABLED:
            slack = notifiers.SLACK()
            slack.notify("Download and Postprocessing completed", prline, module=module)
@@ -2098,7 +2515,11 @@ class FolderCheck():
            return
        #monitor a selected folder for 'snatched' files that haven't been processed
        #junk the queue as it's not needed for folder monitoring, but needed for post-processing to run without error.
+        helpers.job_management(write=True, job='Folder Monitor', current_run=helpers.utctimestamp(), status='Running')
+        mylar.MONITOR_STATUS = 'Running'
        logger.info(self.module + ' Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads')
        PostProcess = PostProcessor('Manual Run', mylar.CHECK_FOLDER, queue=self.queue)
        result = PostProcess.Process()
        logger.info(self.module + ' Finished checking for newly snatched downloads')
+        helpers.job_management(write=True, job='Folder Monitor', last_run_completed=helpers.utctimestamp(), status='Waiting')
+        mylar.MONITOR_STATUS = 'Waiting'
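
FolderCheck.run() above brackets each folder scan with job_management() calls plus a module-level MONITOR_STATUS flag, so the jobhistory table (created in the dbcheck() hunk below) always reflects whether the monitor is mid-run or idle. The shape of that bracketing, generalized (the record callback and the try/finally are assumptions of this sketch; the patch records completion unconditionally in straight-line code):

    import time

    def run_with_status(job_name, body, record):
        # record(job, timestamp, status) stands in for helpers.job_management
        record(job_name, time.time(), 'Running')
        try:
            body()
        finally:
            record(job_name, time.time(), 'Waiting')
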
diff --git a/mylar/__init__.py b/mylar/__init__.py
index 92cfd8bd..3b357b4c 100644
--- a/mylar/__init__.py
+++ b/mylar/__init__.py
@@ -33,12 +33,14 @@ import locale
 import re
 from threading import Lock, Thread
-from apscheduler.scheduler import Scheduler
+from apscheduler.schedulers.background import BackgroundScheduler
+from apscheduler.triggers.interval import IntervalTrigger
+
 from configobj import ConfigObj
 import cherrypy
-from mylar import logger, versioncheckit, rsscheckit, searchit, weeklypullit, dbupdater, PostProcessor, helpers, scheduler #versioncheck, rsscheck, search, PostProcessor, weeklypull, helpers, scheduler
+from mylar import logger, versioncheckit, rsscheckit, searchit, weeklypullit, PostProcessor, updater, helpers
 FULL_PATH = None
 PROG_DIR = None
@@ -65,9 +67,15 @@ IMPORT_PARSED_COUNT = 0
 IMPORT_FAILURE_COUNT = 0
 CHECKENABLED = False
-SCHED = Scheduler()
+SCHED = BackgroundScheduler({
+                             'apscheduler.executors.default': {
+                                 'class': 'apscheduler.executors.pool:ThreadPoolExecutor',
+                                 'max_workers': '20'
+                             },
+                             'apscheduler.job_defaults.coalesce': 'false',
+                             'apscheduler.job_defaults.max_instances': '3',
+                             'apscheduler.timezone': 'UTC'})
-INIT_LOCK = threading.Lock()
 __INITIALIZED__ = False
 started = False
 WRITELOCK = False
@@ -76,6 +84,20 @@ IMPORTLOCK = False
 ## for use with updated scheduler (not working atm)
 INIT_LOCK = Lock()
+SCHED_DBUPDATE_LAST = None
+SCHED_RSS_LAST = None
+SCHED_SEARCH_LAST = None
+SCHED_WEEKLY_LAST = None
+SCHED_VERSION_LAST = None
+SCHED_MONITOR_LAST = None
+
+MONITOR_STATUS = 'Waiting'
+SEARCH_STATUS = 'Waiting'
+RSS_STATUS = 'Waiting'
+WEEKLY_STATUS = 'Waiting'
+VERSION_STATUS = 'Waiting'
+UPDATER_STATUS = 'Waiting'
+
 dbUpdateScheduler = None
 searchScheduler = None
 RSSScheduler = None
@@ -366,7 +388,6 @@ UPCOMING_SNATCHED = 1
 ENABLE_RSS = 0
 RSS_CHECKINTERVAL = 20
-RSS_LASTRUN = None
 #these are used to set the comparison against the post-processing scripts
 STATIC_COMICRN_VERSION = "1.01"
@@ -404,6 +425,7 @@ TPSE_VERIFY = True
 ENABLE_32P = 0
 SEARCH_32P = 0   #0 = use WS to grab torrent groupings, #1 = use 32P to grab torrent groupings
+DEEP_SEARCH_32P = 0   #0 = do not take multiple search series results & use ref32p if available, #1= search each search series result for valid issue & posting date
 MODE_32P = None   #0 = legacymode, #1 = authmode
 KEYS_32P = None
 RSSFEED_32P = None
@@ -411,6 +433,7 @@ PASSKEY_32P = None
 USERNAME_32P = None
 PASSWORD_32P = None
 AUTHKEY_32P = None
+INKDROPS_32P = None
 FEEDINFO_32P = None
 VERIFY_32P = 1
 SNATCHEDTORRENT_NOTIFY = 0
@@ -516,9 +539,11 @@ def initialize():
        USE_UTORRENT, UTORRENT_HOST, UTORRENT_USERNAME, UTORRENT_PASSWORD, UTORRENT_LABEL, USE_TRANSMISSION, TRANSMISSION_HOST, TRANSMISSION_USERNAME, TRANSMISSION_PASSWORD, TRANSMISSION_DIRECTORY, USE_DELUGE, DELUGE_HOST, DELUGE_USERNAME, DELUGE_PASSWORD, DELUGE_LABEL, \
        USE_QBITTORRENT, QBITTORRENT_HOST, QBITTORRENT_USERNAME, QBITTORRENT_PASSWORD, QBITTORRENT_LABEL, QBITTORRENT_FOLDER, QBITTORRENT_STARTONLOAD, \
        ENABLE_META, CMTAGGER_PATH, CBR2CBZ_ONLY, CT_TAG_CR, CT_TAG_CBL, CT_CBZ_OVERWRITE, UNRAR_CMD, CT_SETTINGSPATH, CMTAG_VOLUME, CMTAG_START_YEAR_AS_VOLUME, UPDATE_ENDED, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, SNATCHED_HAVETOTAL, PROVIDER_ORDER, TMP_PROV, \
-        dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \
+        SCHED, dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \
+        SCHED_DBUPDATE_LAST, SCHED_RSS_LAST, SCHED_SEARCH_LAST, SCHED_WEEKLY_LAST, SCHED_VERSION_LAST, SCHED_MONITOR_LAST, \
+        MONITOR_STATUS, SEARCH_STATUS, UPDATER_STATUS, VERSION_STATUS, WEEKLY_STATUS, RSS_STATUS, \
        ALLOW_PACKS, ENABLE_TORRENTS, TORRENT_DOWNLOADER, MINSEEDS, USE_WATCHDIR, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \
-        ENABLE_RSS, RSS_CHECKINTERVAL, RSS_LASTRUN, FAILED_DOWNLOAD_HANDLING, FAILED_AUTO, ENABLE_TORRENT_SEARCH, ENABLE_TPSE, WWTURL, DEMURL, TPSEURL, TPSE_PROXY, TPSE_VERIFY, ENABLE_32P, SEARCH_32P, MODE_32P, KEYS_32P, RSSFEED_32P, USERNAME_32P, PASSWORD_32P, AUTHKEY_32P, PASSKEY_32P, FEEDINFO_32P, VERIFY_32P, SNATCHEDTORRENT_NOTIFY, \
+        ENABLE_RSS, RSS_CHECKINTERVAL, FAILED_DOWNLOAD_HANDLING, FAILED_AUTO, ENABLE_TORRENT_SEARCH, ENABLE_TPSE, WWTURL, DEMURL, TPSEURL, TPSE_PROXY, TPSE_VERIFY, ENABLE_32P, SEARCH_32P, DEEP_SEARCH_32P, MODE_32P, KEYS_32P, RSSFEED_32P, USERNAME_32P, PASSWORD_32P, AUTHKEY_32P, INKDROPS_32P, PASSKEY_32P, FEEDINFO_32P, VERIFY_32P, SNATCHEDTORRENT_NOTIFY, \
        PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, BOXCAR_ENABLED, BOXCAR_ONSNATCH, BOXCAR_TOKEN, \
        PUSHBULLET_ENABLED, PUSHBULLET_APIKEY, PUSHBULLET_DEVICEID, PUSHBULLET_CHANNEL_TAG, PUSHBULLET_ONSNATCH, LOCMOVE, NEWCOM_DIR, FFTONEWCOM_DIR, \
        PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, \
@@ -764,7 +789,6 @@ def initialize():
    ENABLE_RSS = bool(check_setting_int(CFG, 'General', 'enable_rss', 1))
    RSS_CHECKINTERVAL = check_setting_str(CFG, 'General', 'rss_checkinterval', '20')
-    RSS_LASTRUN = check_setting_str(CFG, 'General', 'rss_lastrun', '')
    FAILED_DOWNLOAD_HANDLING = bool(check_setting_int(CFG, 'General', 'failed_download_handling', 0))
    FAILED_AUTO = bool(check_setting_int(CFG, 'General', 'failed_auto', 0))
@@ -794,6 +818,7 @@ def initialize():
    else:
        ENABLE_32P = bool(check_setting_int(CFG, 'Torrents', 'enable_32p', 0))
        SEARCH_32P = bool(check_setting_int(CFG, 'Torrents', 'search_32p', 0))
+        DEEP_SEARCH_32P = bool(check_setting_int(CFG, 'Torrents', 'deep_search_32p', 0))
        MODE_32P = check_setting_int(CFG, 'Torrents', 'mode_32p', 0)
        #legacy support of older config - reload into old values for consistency.
@@ -1291,49 +1316,49 @@ def initialize():
    COMICSORT = helpers.ComicSort(sequence='startup')
    #initialize the scheduler threads here.
-    dbUpdateScheduler = scheduler.Scheduler(action=dbupdater.dbUpdate(),
-                                            cycleTime=datetime.timedelta(hours=48),
-                                            runImmediately=False,
-                                            threadName="DBUPDATE")
+    #dbUpdateScheduler = scheduler.Scheduler(action=dbupdater.dbUpdate(),
+    #                                        cycleTime=datetime.timedelta(minutes=5),
+    #                                        runImmediately=False,
+    #                                        threadName="DBUPDATE")
-    if NZB_STARTUP_SEARCH:
-        searchrunmode = True
-    else:
-        searchrunmode = False
+    #if NZB_STARTUP_SEARCH:
+    #    searchrunmode = True
+    #else:
+    #    searchrunmode = False
-    searchScheduler = scheduler.Scheduler(searchit.CurrentSearcher(),
-                                          cycleTime=datetime.timedelta(minutes=SEARCH_INTERVAL),
-                                          threadName="SEARCH",
-                                          runImmediately=searchrunmode)
-
-    RSSScheduler = scheduler.Scheduler(rsscheckit.tehMain(),
-                                       cycleTime=datetime.timedelta(minutes=int(RSS_CHECKINTERVAL)),
-                                       threadName="RSSCHECK",
-                                       runImmediately=True,
-                                       delay=30)
-
-    if ALT_PULL == 2:
-        weektimer = 4
-    else:
-        weektimer = 24
-
-    WeeklyScheduler = scheduler.Scheduler(weeklypullit.Weekly(),
-                                          cycleTime=datetime.timedelta(hours=weektimer),
-                                          threadName="WEEKLYCHECK",
-                                          runImmediately=True,
-                                          delay=10)
-
-    VersionScheduler = scheduler.Scheduler(versioncheckit.CheckVersion(),
-                                           cycleTime=datetime.timedelta(minutes=CHECK_GITHUB_INTERVAL),
-                                           threadName="VERSIONCHECK",
-                                           runImmediately=False)
+    #searchScheduler = scheduler.Scheduler(searchit.CurrentSearcher(),
+    #                                      cycleTime=datetime.timedelta(minutes=SEARCH_INTERVAL),
+    #                                      threadName="SEARCH",
+    #                                      runImmediately=searchrunmode)
-    FolderMonitorScheduler = scheduler.Scheduler(PostProcessor.FolderCheck(),
-                                                 cycleTime=datetime.timedelta(minutes=int(DOWNLOAD_SCAN_INTERVAL)),
-                                                 threadName="FOLDERMONITOR",
-                                                 runImmediately=True,
-                                                 delay=60)
+    #RSSScheduler = scheduler.Scheduler(rsscheckit.tehMain(),
+    #                                   cycleTime=datetime.timedelta(minutes=int(RSS_CHECKINTERVAL)),
+    #                                   threadName="RSSCHECK",
+    #                                   runImmediately=True,
+    #                                   delay=30)
+
+    #if ALT_PULL == 2:
+    #    weektimer = 4
+    #else:
+    #    weektimer = 24
+
+    #WeeklyScheduler = scheduler.Scheduler(weeklypullit.Weekly(),
+    #                                      cycleTime=datetime.timedelta(hours=weektimer),
+    #                                      threadName="WEEKLYCHECK",
+    #                                      runImmediately=True,
+    #                                      delay=10)
+
+    #VersionScheduler = scheduler.Scheduler(versioncheckit.CheckVersion(),
+    #                                       cycleTime=datetime.timedelta(minutes=CHECK_GITHUB_INTERVAL),
+    #                                       threadName="VERSIONCHECK",
+    #                                       runImmediately=False)
+
+    #FolderMonitorScheduler = scheduler.Scheduler(PostProcessor.FolderCheck(),
+    #                                             cycleTime=datetime.timedelta(minutes=int(DOWNLOAD_SCAN_INTERVAL)),
+    #                                             threadName="FOLDERMONITOR",
+    #                                             runImmediately=True,
+    #                                             delay=60)
    # Store the original umask
    UMASK = os.umask(0)
@@ -1558,7 +1583,6 @@ def config_write():
    new_config['General']['upcoming_snatched'] = int(UPCOMING_SNATCHED)
    new_config['General']['enable_rss'] = int(ENABLE_RSS)
    new_config['General']['rss_checkinterval'] = RSS_CHECKINTERVAL
-    new_config['General']['rss_lastrun'] = RSS_LASTRUN
    new_config['General']['failed_download_handling'] = int(FAILED_DOWNLOAD_HANDLING)
    new_config['General']['failed_auto'] = int(FAILED_AUTO)
@@ -1598,6 +1622,7 @@ def config_write():
    new_config['Torrents']['tpse_verify'] = TPSE_VERIFY
    new_config['Torrents']['enable_32p'] = int(ENABLE_32P)
    new_config['Torrents']['search_32p'] = int(SEARCH_32P)
+    new_config['Torrents']['deep_search_32p'] = int(DEEP_SEARCH_32P)
    new_config['Torrents']['mode_32p'] = int(MODE_32P)
    new_config['Torrents']['passkey_32p'] = PASSKEY_32P
    new_config['Torrents']['rssfeed_32p'] = RSSFEED_32P
@@ -1734,7 +1759,7 @@ def config_write():
    new_config['TELEGRAM']['telegram_token'] = TELEGRAM_TOKEN
    new_config['TELEGRAM']['telegram_userid'] = TELEGRAM_USERID
    new_config['TELEGRAM']['telegram_onsnatch'] = int(TELEGRAM_ONSNATCH)
-
+
    new_config['SLACK'] = {}
    new_config['SLACK']['slack_enabled'] = int(SLACK_ENABLED)
    new_config['SLACK']['slack_webhook_url'] = SLACK_WEBHOOK_URL
@@ -1744,69 +1769,119 @@ def config_write():
 def start():
-    global __INITIALIZED__, started, \
-        dbUpdateScheduler, searchScheduler, RSSScheduler, \
-        WeeklyScheduler, VersionScheduler, FolderMonitorScheduler
+    global __INITIALIZED__, started
    with INIT_LOCK:
        if __INITIALIZED__:
+            #load up the previous runs from the job sql table so we know stuff...
+            helpers.job_management()
            # Start our scheduled background tasks
-            #from mylar import updater, search, PostProcessor
+            SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC'))
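
With the bundled APScheduler bumped from 2.x to 3.3.1, the per-task scheduler.Scheduler thread wrappers give way to a single BackgroundScheduler whose jobs are registered with IntervalTrigger, as the DB Updater registration above shows. A self-contained sketch of the same pattern:

    import datetime
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    sched = BackgroundScheduler()

    def db_update():
        print('updating...')

    # Every 5 minutes; next_run_time=now() also fires one run immediately,
    # the same trick the startup code uses for jobs that are overdue.
    sched.add_job(func=db_update, id='dbupdater', name='DB Updater',
                  next_run_time=datetime.datetime.now(),
                  trigger=IntervalTrigger(minutes=5, timezone='UTC'))
    sched.start()
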
+            #let's do a run at the Wanted issues here (on startup) if enabled.
+            ss = searchit.CurrentSearcher()
+            if NZB_STARTUP_SEARCH:
+                SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.now(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC'))
+            else:
+                if SCHED_SEARCH_LAST is not None:
+                    search_timestamp = float(SCHED_SEARCH_LAST)
+                    logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp))
+                else:
+                    search_timestamp = helpers.utctimestamp() + (int(SEARCH_INTERVAL) *60)
-            #SCHED.add_interval_job(updater.dbUpdate, hours=48)
-            #SCHED.add_interval_job(search.searchforissue, minutes=SEARCH_INTERVAL)
+                duration_diff = (helpers.utctimestamp() - search_timestamp)/60
+                logger.fdebug('[AUTO-SEARCH] duration_diff : %s' % duration_diff)
+                if duration_diff >= int(SEARCH_INTERVAL):
+                    logger.fdebug('[AUTO-SEARCH] Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff)
+                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC'))
+                else:
+                    search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(SEARCH_INTERVAL) * 60) - (duration_diff*60)))
+                    logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, SEARCH_INTERVAL))
+                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC'))
            if all([ENABLE_TORRENTS, AUTO_SNATCH, OS_DETECT != 'Windows']) and any([TORRENT_DOWNLOADER == 2, TORRENT_DOWNLOADER == 4]):
-                logger.info('Auto-Snatch of comleted torrents enabled & attempting to backgroun load....')
+                logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
                SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
                SNPOOL.start()
-                logger.info('Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')
+                logger.info('[AUTO-SNATCHER] Successfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')
-
-            #start the db updater scheduler
-            logger.info('Initializing the DB Updater.')
-            dbUpdateScheduler.thread.start()
-
-            #start the search scheduler
-            searchScheduler.thread.start()
            helpers.latestdate_fix()
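
Each job scheduled in start() applies the same arithmetic: compare now against the persisted last-run timestamp, then either fire (nearly) immediately when the interval has already elapsed or schedule only the remainder, so restarting Mylar no longer resets every cadence. The math isolated below (helpers.utctimestamp() is simply time.time(), per the helpers.py hunk later in this diff):

    import datetime
    import time

    def compute_next_run(last_run_ts, interval_min):
        elapsed_min = (time.time() - last_run_ts) / 60
        if elapsed_min >= interval_min:
            return datetime.datetime.now()          # overdue: run now
        # otherwise run once the rest of the interval has passed
        remaining_sec = (interval_min - elapsed_min) * 60
        return datetime.datetime.utcfromtimestamp(time.time() + remaining_sec)
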

            #initiate startup rss feeds for torrents/nzbs here...
            if ENABLE_RSS:
-                #SCHED.add_interval_job(rsscheck.tehMain, minutes=int(RSS_CHECKINTERVAL))
-                RSSScheduler.thread.start()
-                logger.info('Initiating startup-RSS feed checks.')
-                #rsscheck.tehMain()
+                logger.info('[RSS-FEEDS] Initiating startup-RSS feed checks.')
+                if SCHED_RSS_LAST is not None:
+                    rss_timestamp = float(SCHED_RSS_LAST)
+                    logger.info('[RSS-FEEDS] RSS last run @ %s' % datetime.datetime.utcfromtimestamp(rss_timestamp))
+                else:
+                    rss_timestamp = helpers.utctimestamp() + (int(RSS_CHECKINTERVAL) *60)
+                rs = rsscheckit.tehMain()
+                logger.fdebug('[RSS-FEEDS] rss_timestamp: %s' % rss_timestamp)
+                logger.fdebug('[RSS-FEEDS] utcfromtimestamp: %s' % helpers.utctimestamp())
+                logger.fdebug('[RSS-FEEDS] rss_checkinterval: %s' % (int(RSS_CHECKINTERVAL) * 60))
+                logger.fdebug('[RSS-FEEDS] today: %s' % datetime.datetime.utcfromtimestamp(helpers.utctimestamp()))
+                duration_diff = (helpers.utctimestamp() - rss_timestamp)/60
+                logger.fdebug('[RSS-FEEDS] duration_diff (mins): %s' % str(duration_diff))
+                if duration_diff >= int(RSS_CHECKINTERVAL):
+                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=datetime.datetime.now(), trigger=IntervalTrigger(hours=0, minutes=int(RSS_CHECKINTERVAL), timezone='UTC'))
+                else:
+                    rss_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (int(RSS_CHECKINTERVAL) * 60) - (duration_diff * 60))
+                    logger.fdebug('[RSS-FEEDS] Scheduling next run for @ %s every %s minutes' % (rss_diff, RSS_CHECKINTERVAL))
+                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=rss_diff, trigger=IntervalTrigger(hours=0, minutes=int(RSS_CHECKINTERVAL), timezone='UTC'))
+
+            if ALT_PULL == 2:
+                weektimer = 4
+            else:
+                weektimer = 24
            #weekly pull list gets messed up if it's not populated first, so let's populate it then set the scheduler.
-            logger.info('Checking for existance of Weekly Comic listing...')
-            #PULLNEW = 'no'  #reset the indicator here.
-            #threading.Thread(target=weeklypull.pullit).start()
-            #now the scheduler (check every 24 hours)
-            #SCHED.add_interval_job(weeklypull.pullit, hours=24)
-            if not NOWEEKLY:
-                WeeklyScheduler.thread.start()
+            logger.info('[WEEKLY] Checking for existence of Weekly Comic listing...')
-            #let's do a run at the Wanted issues here (on startup) if enabled.
-            #if NZB_STARTUP_SEARCH:
-            #    threading.Thread(target=search.searchforissue).start()
+            #now the scheduler (check every 24 hours)
+            weekly_interval = weektimer * 60 * 60
+            if SCHED_WEEKLY_LAST is not None:
+                weekly_timestamp = float(SCHED_WEEKLY_LAST)
+            else:
+                weekly_timestamp = helpers.utctimestamp() + weekly_interval
+
+            ws = weeklypullit.Weekly()
+            duration_diff = (helpers.utctimestamp() - weekly_timestamp)/60
+
+            if duration_diff >= weekly_interval/60:
+                logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % (duration_diff/60))
+                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.now(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
+            else:
+                weekly_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (weekly_interval - (duration_diff * 60)))
+                logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer))
+                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
            if CHECK_GITHUB:
-                VersionScheduler.thread.start()
-                #SCHED.add_interval_job(versioncheck.checkGithub, minutes=CHECK_GITHUB_INTERVAL)
+                vs = versioncheckit.CheckVersion()
+                SCHED.add_job(func=vs.run, id='version', name='Check Version', trigger=IntervalTrigger(hours=0, minutes=CHECK_GITHUB_INTERVAL, timezone='UTC'))
-            #run checkFolder every X minutes (basically Manual Run Post-Processing)
+            ##run checkFolder every X minutes (basically Manual Run Post-Processing)
            if ENABLE_CHECK_FOLDER:
                if DOWNLOAD_SCAN_INTERVAL >0:
-                    logger.info('Enabling folder monitor for : ' + str(CHECK_FOLDER) + ' every ' + str(DOWNLOAD_SCAN_INTERVAL) + ' minutes.')
-                    FolderMonitorScheduler.thread.start()
-                    #SCHED.add_interval_job(helpers.checkFolder, minutes=int(DOWNLOAD_SCAN_INTERVAL))
+                    logger.info('[FOLDER MONITOR] Enabling folder monitor for : ' + str(CHECK_FOLDER) + ' every ' + str(DOWNLOAD_SCAN_INTERVAL) + ' minutes.')
+                    fm = PostProcessor.FolderCheck()
+                    SCHED.add_job(func=fm.run, id='monitor', name='Folder Monitor', trigger=IntervalTrigger(hours=0, minutes=int(DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
                else:
-                    logger.error('You need to specify a monitoring time for the check folder option to work')
-            SCHED.start()
+                    logger.error('[FOLDER MONITOR] You need to specify a monitoring time for the check folder option to work')
+
+            logger.info('Firing up the Background Schedulers now....')
+            try:
+                SCHED.print_jobs()
+                SCHED.start()
+                #update the job db here
+                logger.info('Background Schedulers successfully started...')
+                helpers.job_management(write=True) #, status='Waiting')
+            except Exception as e:
+                logger.info(e)
+                # Debug
+                SCHED.print_jobs()
        started = True
@@ -1838,6 +1913,7 @@ def dbcheck():
    c.execute('CREATE TABLE IF NOT EXISTS searchresults (SRID TEXT, results Numeric, Series TEXT, publisher TEXT, haveit TEXT, name TEXT, deck TEXT, url TEXT, description TEXT, comicid TEXT, comicimage TEXT, issues TEXT, comicyear TEXT, ogcname TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS ref32p (ComicID TEXT UNIQUE, ID TEXT, Series TEXT, Updated TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS oneoffhistory (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, Status TEXT, weeknumber TEXT, year TEXT)')
+    c.execute('CREATE TABLE IF NOT EXISTS jobhistory (JobName TEXT, prev_run_datetime timestamp, prev_run_timestamp REAL, next_run_datetime timestamp, next_run_timestamp REAL, last_run_completed TEXT, successful_completions TEXT, failed_completions TEXT, status TEXT)')
    conn.commit
    c.close
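
The new jobhistory table stores each job's timing twice over, as human-readable datetimes and as raw epoch timestamps, keyed on JobName; job_management() (added to helpers.py further down) upserts into it. The write, sketched against sqlite3 directly with a trimmed column set instead of Mylar's db wrapper:

    import datetime
    import sqlite3
    import time

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE jobhistory (JobName TEXT UNIQUE, '
                 'prev_run_timestamp REAL, prev_run_datetime timestamp, status TEXT)')
    now = time.time()
    # With JobName unique, INSERT OR REPLACE behaves like the upsert.
    conn.execute('INSERT OR REPLACE INTO jobhistory VALUES (?, ?, ?, ?)',
                 ('RSS Feeds', now, datetime.datetime.utcfromtimestamp(now), 'Running'))
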
#new
@@ -2322,13 +2398,19 @@ def dbcheck():
    except sqlite3.OperationalError:
        c.execute('ALTER TABLE Failed ADD COLUMN DateFailed TEXT')
-    ## -- Failed Table --
+    ## -- Ref32p Table --
    try:
        c.execute('SELECT Updated from ref32p')
    except sqlite3.OperationalError:
        c.execute('ALTER TABLE ref32p ADD COLUMN Updated TEXT')
+    ## -- Jobhistory Table --
+    try:
+        c.execute('SELECT status from jobhistory')
+    except sqlite3.OperationalError:
+        c.execute('ALTER TABLE jobhistory ADD COLUMN status TEXT')
+
    #if it's prior to Wednesday, the issue counts will be inflated by one as the online db's everywhere
    #prepare for the next 'new' release of a series. It's caught in updater.py, so let's just store the
    #value in the sql so we can display it in the details screen for everyone to wonder at.
@@ -2365,6 +2447,12 @@ def dbcheck():
        logger.info('Correcting Null entries that make the main page break on startup.')
        c.execute("UPDATE Comics SET LatestDate='Unknown' WHERE LatestDate='None' or LatestDate is NULL")
+    job_listing = c.execute('SELECT * FROM jobhistory')
+    job_history = []
+    for jh in job_listing:
+        job_history.append(jh)
+
+    logger.info('job_history loaded: %s' % job_history)
    conn.commit()
    c.close()
@@ -2438,51 +2526,55 @@ def halt():
    if __INITIALIZED__:
-        logger.info(u"Aborting all threads")
+        logger.info(u"Trying to gracefully shutdown the background schedulers...")
+        try:
+            SCHED.shutdown()
+        except:
+            SCHED.shutdown(wait=False)
        # abort all the threads
-        dbUpdateScheduler.abort = True
-        logger.info(u"Waiting for the DB UPDATE thread to exit")
-        try:
-            dbUpdateScheduler.thread.join(10)
-        except:
-            pass
+        #dbUpdateScheduler.abort = True
+        #logger.info(u"Waiting for the DB UPDATE thread to exit")
+        #try:
+        #    dbUpdateScheduler.thread.join(10)
+        #except:
+        #    pass
-        searchScheduler.abort = True
-        logger.info(u"Waiting for the SEARCH thread to exit")
-        try:
-            searchScheduler.thread.join(10)
-        except:
-            pass
+        #searchScheduler.abort = True
+        #logger.info(u"Waiting for the SEARCH thread to exit")
+        #try:
+        #    searchScheduler.thread.join(10)
+        #except:
+        #    pass
-        RSSScheduler.abort = True
-        logger.info(u"Waiting for the RSS CHECK thread to exit")
-        try:
-            RSSScheduler.thread.join(10)
-        except:
-            pass
+        #RSSScheduler.abort = True
+        #logger.info(u"Waiting for the RSS CHECK thread to exit")
+        #try:
+        #    RSSScheduler.thread.join(10)
+        #except:
+        #    pass
-        WeeklyScheduler.abort = True
-        logger.info(u"Waiting for the WEEKLY CHECK thread to exit")
-        try:
-            WeeklyScheduler.thread.join(10)
-        except:
-            pass
+        #WeeklyScheduler.abort = True
+        #logger.info(u"Waiting for the WEEKLY CHECK thread to exit")
+        #try:
+        #    WeeklyScheduler.thread.join(10)
+        #except:
+        #    pass
-        VersionScheduler.abort = True
-        logger.info(u"Waiting for the VERSION CHECK thread to exit")
-        try:
-            VersionScheduler.thread.join(10)
-        except:
-            pass
+        #VersionScheduler.abort = True
+        #logger.info(u"Waiting for the VERSION CHECK thread to exit")
+        #try:
+        #    VersionScheduler.thread.join(10)
+        #except:
+        #    pass
-        FolderMonitorScheduler.abort = True
-        logger.info(u"Waiting for the FOLDER MONITOR thread to exit")
-        try:
-            FolderMonitorScheduler.thread.join(10)
-        except:
-            pass
+        #FolderMonitorScheduler.abort = True
+        #logger.info(u"Waiting for the FOLDER MONITOR thread to exit")
+        #try:
+        #    FolderMonitorScheduler.thread.join(10)
+        #except:
+        #    pass
        if SNPOOL is not None:
            logger.info('Terminating the auto-snatch thread.')
@@ -2503,7 +2595,7 @@ def shutdown(restart=False, update=False):
    cherrypy.engine.exit()
-    SCHED.shutdown(wait=False)
+    #SCHED.shutdown(wait=False)
    config_write()
diff --git a/mylar/auth32p.py b/mylar/auth32p.py
index 1f25deb9..96453abc 100644
--- a/mylar/auth32p.py
+++ b/mylar/auth32p.py
@@ -1,6 +1,7 @@
 import urllib2
 import re
 import time
+import math
 import datetime
 import os
 import requests
@@ -48,6 +49,10 @@ class info32p(object):
            self.authkey = lses.authkey
            self.passkey = lses.passkey
            self.uid = lses.uid
+            try:
+                mylar.INKDROPS_32P = int(math.floor(float(lses.inkdrops['results'][0]['inkdrops'])))
+            except:
+                mylar.INKDROPS_32P = lses.inkdrops['results'][0]['inkdrops']
        self.reauthenticate = reauthenticate
        self.searchterm = searchterm
@@ -56,7 +61,7 @@ class info32p(object):
    def authenticate(self):
        if self.test:
-            return True
+            return {'status': True, 'inkdrops': mylar.INKDROPS_32P}
        feedinfo = []
@@ -80,7 +85,6 @@ class info32p(object):
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
        # post to the login form
-
        r = s.post(self.url, verify=verify, allow_redirects=True)
        #logger.debug(self.module + " Content session reply" + r.text)
@@ -154,7 +158,7 @@ class info32p(object):
        except NameError:
            logger.warn('Unable to retrieve information from 32Pages - either it is not responding/is down or something else is happening that is stopping me.')
            return
-
+
        if self.reauthenticate:
            return
        else:
@@ -177,13 +181,13 @@ class info32p(object):
            for x in spl:
                publisher_search = re.sub(x, '', publisher_search).strip()
            logger.info('publisher search set to : ' + publisher_search)
-
+
        chk_id = None
        # lookup the ComicID in the 32p sqlite3 table to pull the series_id to use.
        if comic_id:
            chk_id = helpers.checkthe_id(comic_id)
-
-        if not chk_id:
+
+        if any([not chk_id, mylar.DEEP_SEARCH_32P is True]):
            #generate the dynamic name of the series here so we can match it up
            as_d = filechecker.FileChecker()
            as_dinfo = as_d.dynamic_replace(series_search)
@@ -234,7 +238,7 @@ class info32p(object):
        pdata = []
        pubmatch = False
-        if not chk_id:
+        if any([not chk_id, mylar.DEEP_SEARCH_32P is True]):
            if mylar.SEARCH_32P:
                url = 'https://32pag.es/torrents.php' #?action=serieslist&filter=' + series_search #&filter=F
                params = {'action': 'serieslist', 'filter': series_search}
@@ -283,21 +287,14 @@ class info32p(object):
        if all([len(data) == 0, len(pdata) == 0]):
            return "no results"
-
-        if len(pdata) == 1:
-            logger.info(str(len(pdata)) + ' series match the title being search for')
-            dataset = pdata
-            searchid = pdata[0]['id']
-        elif len(data) == 1:
-            logger.info(str(len(data)) + ' series match the title being search for')
-            dataset = data
-            searchid = data[0]['id']
        else:
            dataset = []
            if len(data) > 0:
                dataset += data
            if len(pdata) > 0:
                dataset += pdata
+        logger.info('dataset: %s' % dataset)
+        logger.info(str(len(dataset)) + ' series match the title being searched for on 32P...')
        if chk_id is None and any([len(data) == 1, len(pdata) == 1]):
            #update the 32p_reference so we avoid doing a url lookup next time
@@ -326,8 +323,6 @@ class info32p(object):
            #logger.debug(self.module + ' Reply from AJAX: \n %s', d.text)
        except Exception as e:
            logger.info(self.module + ' Could not POST URL %s', url)
-
-
        try:
            searchResults = d.json()
@@ -337,7 +332,6 @@ class info32p(object):
            return False
        #logger.debug(self.module + " Search Result: %s", searchResults)
-
        if searchResults['status'] == 'success' and searchResults['count'] > 0:
            logger.info('successfully retrieved ' + str(searchResults['count']) + ' search results.')
            for a in searchResults['details']:
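
The INKDROPS_32P assignment above has to tolerate whatever the 32P ajax endpoint returns, flooring a numeric string to an int and otherwise keeping the raw value. Worked in isolation (the patch uses a bare except, which also swallows KeyError from the nested lookup; this sketch narrows it to the parse errors):

    import math

    def normalize_inkdrops(raw):
        # '1234.56' -> 1234; anything unparseable is kept verbatim.
        try:
            return int(math.floor(float(raw)))
        except (TypeError, ValueError):
            return raw

    assert normalize_inkdrops('1234.56') == 1234
    assert normalize_inkdrops('n/a') == 'n/a'
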
@@ -392,6 +386,7 @@ class info32p(object):
            self.authkey = None
            self.passkey = None
            self.uid = None
+            self.inkdrops = None
        def cookie_exists(self, name):
            '''
@@ -459,6 +454,19 @@ class info32p(object):
                self.uid = j['response']['id']
                self.authkey = j['response']['authkey']
                self.passkey = pk = j['response']['passkey']
+
+                try:
+                    d = self.ses.get('https://32pag.es/ajax.php', params={'action': 'user_inkdrops'}, verify=True, allow_redirects=True)
+                except Exception as e:
+                    logger.error('Unable to retrieve Inkdrop total : %s' % e)
+                else:
+                    try:
+                        self.inkdrops = d.json()
+                    except:
+                        logger.error('Inkdrop result did not return valid JSON, unable to verify response')
+                    else:
+                        logger.info('inkdrops: %s' % self.inkdrops)
+
                return True
    def valid_login_attempt(self, un, pw):
diff --git a/mylar/cmtagmylar.py b/mylar/cmtagmylar.py
index 0613eb78..da70f76f 100644
--- a/mylar/cmtagmylar.py
+++ b/mylar/cmtagmylar.py
@@ -158,6 +158,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
    original_tagoptions = tagoptions
    og_tagtype = None
    initial_ctrun = True
+    error_remove = False
    while (i <= tagcnt):
        if initial_ctrun:
@@ -207,7 +208,12 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
        if initial_ctrun and 'exported successfully' in out:
            logger.fdebug(module + '[COMIC-TAGGER] : ' +str(out))
            #Archive exported successfully to: X-Men v4 008 (2014) (Digital) (Nahga-Empire).cbz (Original deleted)
-            tmpfilename = re.sub('Archive exported successfully to: ', '', out.rstrip())
+            if 'Error deleting' in filepath:
+                tf1 = out.find('exported successfully to: ')
+                tmpfilename = out[tf1 + len('exported successfully to: '):].strip()
+                error_remove = True
+            else:
+                tmpfilename = re.sub('Archive exported successfully to: ', '', out.rstrip())
            if mylar.FILE_OPTS == 'move':
                tmpfilename = re.sub('\(Original deleted\)', '', tmpfilename).strip()
            tmpf = tmpfilename.decode('utf-8')
@@ -232,7 +238,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen
            if 'file is not expected size' in out:
                logger.fdebug('%s Output: %s' % (module,out))
                tidyup(og_filepath, new_filepath, new_folder, manualmeta)
-                return 'fail' #'corrupt'
+                return 'corrupt'
            else:
                logger.warn(module + '[COMIC-TAGGER][CBR-TO-CBZ] Failed to convert cbr to cbz - check permissions on folder : ' + mylar.CACHE_DIR + ' and/or the location where Mylar is trying to tag the files from.')
                tidyup(og_filepath, new_filepath, new_folder, manualmeta)
diff --git a/mylar/dbupdater.py b/mylar/dbupdater.py
index 137b78d4..5f7b5638 100644
--- a/mylar/dbupdater.py
+++ b/mylar/dbupdater.py
@@ -17,15 +17,17 @@ from __future__ import with_statement
 import mylar
-from mylar import logger
+from mylar import logger, helpers
 #import threading
 class dbUpdate():
-    def __init__(self):
+    def __init__(self, sched):
+        self.sched = sched
        pass
    def run(self):
        logger.info('[DBUpdate] Updating Database.')
-        mylar.updater.dbUpdate()
-        return
+        helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running')
+        mylar.updater.dbUpdate(sched=self.sched)
+        helpers.job_management(write=True, job='DB Updater', last_run_completed=helpers.utctimestamp(), status='Waiting')
diff --git a/mylar/filechecker.py b/mylar/filechecker.py
index fedac0ed..e3ec5664 100755
--- a/mylar/filechecker.py
+++ b/mylar/filechecker.py
@@ -47,6 +47,7 @@ class FileChecker(object):
            #watchcomic = unicode name of series that is being searched against
            self.og_watchcomic = watchcomic
            self.watchcomic = re.sub('\?', '', watchcomic).strip() #strip the ? sepearte since it affects the regex.
+            self.watchcomic = re.sub(u'\u2014', ' - ', watchcomic).strip() #replace the \u2014 with a normal - because this world is f'd up enough to have something like that.
            self.watchcomic = unicodedata.normalize('NFKD', self.watchcomic).encode('ASCII', 'ignore')
        else:
            self.watchcomic = None
@@ -91,7 +92,7 @@ class FileChecker(object):
        self.failed_files = []
-        self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')']
+        self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')','\u2014']
        self.dynamic_replacements = ['and','the']
        self.rippers = ['-empire','-empire-hd','minutemen-','-dcp']
@@ -502,6 +503,8 @@ class FileChecker(object):
                        volume_found['position'] = split_file.index(volumeprior_label, current_pos -1) #if this passes, then we're ok, otherwise will try exception
                        logger.fdebug('volume_found: ' + str(volume_found['position']))
                    except:
+                        volumeprior = False
+                        volumeprior_label = None
                        sep_volume = False
                        continue
                else:
@@ -577,6 +580,9 @@ class FileChecker(object):
                    else:
                        raise ValueError
                except ValueError, e:
+                    volumeprior = False
+                    volumeprior_label = None
+                    sep_volume = False
                    pass
                    #logger.fdebug('Error detecting issue # - ignoring this result : ' + str(sf))
@@ -855,7 +861,7 @@ class FileChecker(object):
        mod_series_decoded = self.dynamic_replace(series_info['series_name_decoded'])
        mod_seriesname_decoded = mod_dynamicinfo['mod_seriesname']
        mod_watch_decoded = self.dynamic_replace(self.og_watchcomic)
-        mod_watchname_decoded = mod_dynamicinfo['mod_seriesname']
+        mod_watchname_decoded = mod_dynamicinfo['mod_watchcomic']
        #remove the spaces...
        nspace_seriesname = re.sub(' ', '', mod_seriesname)
@@ -1037,6 +1043,7 @@ class FileChecker(object):
                spacer+='|'
            mod_watchcomic = mod_watchcomic[:wd] + spacer + mod_watchcomic[wd+len(wdrm):]
+        series_name = re.sub(u'\u2014', ' - ', series_name)
        seriesdynamic_handlers_match = [x for x in self.dynamic_handlers if x.lower() in series_name.lower()]
        #logger.fdebug('series dynamic handlers recognized : ' + str(seriesdynamic_handlers_match))
        seriesdynamic_replacements_match = [x for x in self.dynamic_replacements if x.lower() in series_name.lower()]
diff --git a/mylar/findcomicfeed.py b/mylar/findcomicfeed.py
index 616e6d57..9eee2799 100755
--- a/mylar/findcomicfeed.py
+++ b/mylar/findcomicfeed.py
@@ -2,6 +2,7 @@
 import os
 import sys
+import time
 import feedparser
 import re
 import logger
@@ -54,9 +55,13 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix):
        max_age = "&age=" + str(mylar.USENET_RETENTION)
    feeds = []
+    feed1 = "http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch
    feeds.append(feedparser.parse("http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch))
+    time.sleep(3)
    if mylar.ALTEXPERIMENTAL:
+        feed2 = "http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch
        feeds.append(feedparser.parse("http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch))
+        time.sleep(3)
    entries = []
    mres = {}
diff --git a/mylar/helpers.py b/mylar/helpers.py
index 803a6873..cc9fdb85 100755
--- a/mylar/helpers.py
+++ b/mylar/helpers.py
@@ -24,9 +24,11 @@ import json
 import re
 import sys
 import platform
+import calendar
 import itertools
 import shutil
 import os, errno
+from apscheduler.triggers.interval import IntervalTrigger
 import mylar
 import logger
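
The filechecker and filesafe() changes in this diff all target U+2014 (the em dash), which NFKD normalization would otherwise silently drop when folding titles to ASCII; rewriting it to a spaced hyphen first preserves the separator. In isolation:

    # -*- coding: utf-8 -*-
    import re
    import unicodedata

    def fold_title(title):
        # u'\u2014' has no ASCII decomposition, so substitute ' - ' first,
        # then strip whatever non-ASCII remains via NFKD folding.
        title = re.sub(u'\u2014', ' - ', title)
        return unicodedata.normalize('NFKD', title).encode('ASCII', 'ignore')

    print(fold_title(u'Batman \u2014 Detective Comics'))
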
@@ -128,6 +130,9 @@ def now():
    now = datetime.datetime.now()
    return now.strftime("%Y-%m-%d %H:%M:%S")
+def utctimestamp():
+    return time.time()
+
 def bytes_to_mb(bytes):
    mb = int(bytes) /1048576
@@ -649,9 +654,9 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N
    logger.fdebug('Destination: ' + dst)
    rename_this = {"destination_dir": dst,
-                    "nfilename": nfilename,
-                    "issueid": issueid,
-                    "comicid": comicid}
+                   "nfilename": nfilename,
+                   "issueid": issueid,
+                   "comicid": comicid}
    return rename_this
@@ -1383,6 +1388,8 @@ def havetotals(refreshit=None):
 def filesafe(comic):
    import unicodedata
+    if u'\u2014' in comic:
+        comic = re.sub(u'\u2014', ' - ', comic)
    try:
        u_comic = unicodedata.normalize('NFKD', comic).encode('ASCII', 'ignore').strip()
    except TypeError:
@@ -1969,25 +1976,44 @@ def listIssues(weeknumber, year):
    library = []
    myDB = db.DBConnection()
    # Get individual issues
-    list = myDB.select("SELECT issues.Status, issues.ComicID, issues.IssueID, issues.ComicName, weekly.publisher, issues.Issue_Number from weekly, issues where weekly.IssueID = issues.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year])
+    list = myDB.select("SELECT issues.Status, issues.ComicID, issues.IssueID, issues.ComicName, issues.IssueDate, issues.ReleaseDate, weekly.publisher, issues.Issue_Number from weekly, issues where weekly.IssueID = issues.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year])
    for row in list:
+        if row['ReleaseDate'] is None:
+            tmpdate = row['IssueDate']
+        else:
+            tmpdate = row['ReleaseDate']
        library.append({'ComicID': row['ComicID'],
-                        'Status': row['Status'],
-                        'IssueID': row['IssueID'],
-                        'ComicName': row['ComicName'],
-                        'Publisher': row['publisher'],
-                        'Issue_Number': row['Issue_Number']})
+                        'Status': row['Status'],
+                        'IssueID': row['IssueID'],
+                        'ComicName': row['ComicName'],
+                        'Publisher': row['publisher'],
+                        'Issue_Number': row['Issue_Number'],
+                        'IssueYear': tmpdate})
+
    # Add the annuals
    if mylar.ANNUALS_ON:
-        list = myDB.select("SELECT annuals.Status, annuals.ComicID, annuals.ReleaseComicID, annuals.IssueID, annuals.ComicName, weekly.publisher, annuals.Issue_Number from weekly, annuals where weekly.IssueID = annuals.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year])
+        list = myDB.select("SELECT annuals.Status, annuals.ComicID, annuals.ReleaseComicID, annuals.IssueID, annuals.ComicName, annuals.ReleaseDate, annuals.IssueDate, weekly.publisher, annuals.Issue_Number from weekly, annuals where weekly.IssueID = annuals.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year])
        for row in list:
+            if row['ReleaseDate'] is None:
+                tmpdate = row['IssueDate']
+            else:
+                tmpdate = row['ReleaseDate']
            library.append({'ComicID': row['ComicID'],
                            'Status': row['Status'],
                            'IssueID': row['IssueID'],
                            'ComicName': row['ComicName'],
                            'Publisher': row['publisher'],
-                            'Issue_Number': row['Issue_Number']})
+                            'Issue_Number': row['Issue_Number'],
+                            'IssueYear': tmpdate})
+
+    #tmplist = library
+    #librarylist = []
+    #for liblist in tmplist:
+    #    lb = myDB.select('SELECT ComicVersion, Type, ComicYear, ComicID from comics WHERE ComicID=?', [liblist['ComicID']])
+    #    librarylist.append(liblist)
+    #    librarylist.update({'Comic_Volume': lb['ComicVersion'],
+    #                        'ComicYear': lb['ComicYear'],
+    #                        'ComicType': lb['Type']})
    return library
 def incr_snatched(ComicID):
@@ -2027,13 +2053,13 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
    #'write' - write new file
    #'dupe_file' - do not write new file as existing file is better quality
    #'dupe_src' - write new file, as existing file is a lesser quality (dupe)
-    rtnval = []
+
    if dupchk['Status'] == 'Downloaded' or dupchk['Status'] == 'Archived':
        try:
            dupsize = dupchk['ComicSize']
        except:
            logger.info('[DUPECHECK] Duplication detection returned no hits as this is a new Snatch. This is not a duplicate.')
-            rtnval.append({'action': "write"})
+            rtnval = {'action': "write"}
        logger.info('[DUPECHECK] Existing Status already set to ' + dupchk['Status'])
        cid = []
@@ -2051,11 +2077,11 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
            else:
                #file is Archived, but no entry exists in the db for the location. Assume Archived, and don't post-process.
                logger.fdebug('[DUPECHECK] File is Archived but no file can be located within the db at the specified location. Assuming this was a manual archival and will not post-process this issue.')
-                rtnval.append({'action': "dont_dupe"})
+                rtnval = {'action': "dont_dupe"}
        else:
-            rtnval.append({'action': "dupe_file",
-                           'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
+            rtnval = {'action': "dupe_file",
+                      'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}
    else:
        logger.info('[DUPECHECK] Existing file within db :' + dupchk['Location'] + ' has a filesize of : ' + str(dupsize) + ' bytes.')
@@ -2066,8 +2092,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
            logger.info('[DUPECHECK] Existing filesize is 0 as I cannot locate the original entry.')
            if dupchk['Status'] == 'Archived':
                logger.info('[DUPECHECK] Assuming issue is Archived.')
-                rtnval.append({'action': "dupe_file",
-                               'to_dupe': filename})
+                rtnval = {'action': "dupe_file",
+                          'to_dupe': filename}
                return rtnval
            else:
                logger.info('[DUPECHECK] Assuming 0-byte file - this one is gonna get hammered.')
@@ -2086,8 +2112,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
                else:
                    #keep filename
                    logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in file : ' + filename)
-                    rtnval.append({'action': "dupe_src",
-                                   'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
+                    rtnval = {'action': "dupe_src",
+                              'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}
            else:
                if dupchk['Location'].endswith('.cbz'):
                    logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] BOTH files are in cbz format. Retaining the larger filesize of the two.')
@@ -2095,8 +2121,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
                else:
                    #keep filename
                    logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in file : ' + dupchk['Location'])
-                    rtnval.append({'action': "dupe_file",
-                                   'to_dupe': filename})
+                    rtnval = {'action': "dupe_file",
+                              'to_dupe': filename}
        elif 'cbz' in mylar.DUPECONSTRAINT:
            if filename.endswith('.cbr'):
@@ -2106,8 +2132,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
                else:
                    #keep filename
                    logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location'])
-                    rtnval.append({'action': "dupe_file",
-                                   'to_dupe': filename})
+                    rtnval = {'action': "dupe_file",
+                              'to_dupe': filename}
            else:
                if dupchk['Location'].endswith('.cbz'):
                    logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] BOTH files are in cbz format. Retaining the larger filesize of the two.')
@@ -2115,22 +2141,22 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None):
                else:
                    #keep filename
                    logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + filename)
-                    rtnval.append({'action': "dupe_src",
-                                   'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
+                    rtnval = {'action': "dupe_src",
+                              'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}
        if mylar.DUPECONSTRAINT == 'filesize' or tmp_dupeconstraint == 'filesize':
            if filesz <= int(dupsize) and int(dupsize) != 0:
                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location'])
-                rtnval.append({'action': "dupe_file",
-                               'to_dupe': filename})
+                rtnval = {'action': "dupe_file",
+                          'to_dupe': filename}
            else:
                logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + filename)
-                rtnval.append({'action': "dupe_src",
-                               'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])})
+                rtnval = {'action': "dupe_src",
+                          'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}
    else:
        logger.info('[DUPECHECK] Duplication detection returned no hits. This is not a duplicate of anything that I have scanned in as of yet.')
-        rtnval.append({'action': "write"})
+        rtnval = {'action': "write"}
    return rtnval
 def create_https_certificates(ssl_cert, ssl_key):
@@ -2676,17 +2702,21 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False):
        curScriptName = shell_cmd + ' ' + str(mylar.AUTO_SNATCH_SCRIPT).decode("string_escape")
        if torrent_files > 1:
-            downlocation = torrent_folder
+            downlocation = torrent_folder.encode('utf-8')
        else:
-            downlocation = torrent_info['files'][0]  #os.path.join(torrent_folder, torrent_info['name'])
+            if mylar.USE_DELUGE:
+                downlocation = os.path.join(torrent_folder, torrent_info['files'][0]['path'])
+            else:
+                downlocation = torrent_info['files'][0].encode('utf-8')
-        downlocation = re.sub("'", "\\'", downlocation)
-        downlocation = re.sub("&", "\\&", downlocation)
+        os.environ['downlocation'] = re.sub("'", "\\'",downlocation)
+        #downlocation = re.sub("\'", "\\'", downlocation)
+        #downlocation = re.sub("&", "\&", downlocation)
-        script_cmd = shlex.split(curScriptName, posix=False) + [downlocation]
+        script_cmd = shlex.split(curScriptName, posix=False)  # + [downlocation]
        logger.fdebug(u"Executing command " +str(script_cmd))
        try:
-            p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=mylar.PROG_DIR)
+            p = subprocess.Popen(script_cmd, env=dict(os.environ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=mylar.PROG_DIR)
            out, err = p.communicate()
            logger.fdebug(u"Script result: " + out)
        except OSError, e:
@@ -2947,6 +2977,169 @@ def get_the_hash(filepath):
    logger.info('Hash of file : ' + thehash)
    return {'hash': thehash}
+def date_conversion(originaldate):
+    c_obj_date = datetime.datetime.strptime(originaldate, "%Y-%m-%d %H:%M:%S")
+    n_date = datetime.datetime.now()
+    absdiff = abs(n_date - c_obj_date)
+    hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0
+    return hours
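
date_conversion() above flattens a timedelta into fractional hours by hand (days*86400 + seconds, then /3600.0), which is equivalent to timedelta.total_seconds()/3600.0 on Python 2.7 and later. A worked check with a pinned "now" so the result is deterministic:

    import datetime

    def hours_since(stamp, now):
        then = datetime.datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S')
        diff = abs(now - then)
        return (diff.days * 24 * 60 * 60 + diff.seconds) / 3600.0

    fixed_now = datetime.datetime(2017, 6, 2, 18, 0, 0)
    print(hours_since('2017-06-01 12:00:00', fixed_now))   # 30.0
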
version_newstatus = ji['status'] + version_nextrun = ji['next_run_timestamp'] + elif 'monitor' in ji['JobName'].lower(): + if mylar.SCHED_MONITOR_LAST is None: + mylar.SCHED_MONITOR_LAST = ji['prev_run_timestamp'] + monitor_newstatus = ji['status'] + monitor_nextrun = ji['next_run_timestamp'] + + #this is for initial startup + for jb in mylar.SCHED.get_jobs(): + #logger.fdebug('jb: %s' % jb) + jobinfo = str(jb) + if 'update' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_DBUPDATE_LAST + newstatus = dbupdate_newstatus + elif 'search' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_SEARCH_LAST + newstatus = search_newstatus + elif 'rss' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_RSS_LAST + newstatus = rss_newstatus + elif 'weekly' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_WEEKLY_LAST + newstatus = weekly_newstatus + elif 'version' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_VERSION_LAST + newstatus = version_newstatus + elif 'monitor' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_MONITOR_LAST + newstatus = monitor_newstatus + jobname = jobinfo[:jobinfo.find('(')-1].strip() + #logger.fdebug('jobinfo: %s' % jobinfo) + jobtimetmp = jobinfo.split('at: ')[1].split('.')[0].strip() + #logger.fdebug('jobtimetmp: %s' % jobtimetmp) + jobtime = float(calendar.timegm(datetime.datetime.strptime(jobtimetmp[:-1], '%Y-%m-%d %H:%M:%S %Z').timetuple())) + #logger.fdebug('jobtime: %s' % jobtime) + + if prev_run_timestamp is not None: + prev_run_time_utc = datetime.datetime.utcfromtimestamp(float(prev_run_timestamp)) + else: + prev_run_time_utc = None + #logger.fdebug('prev_run_time: %s' % prev_run_timestamp) + #logger.fdebug('prev_run_time type: %s' % type(prev_run_timestamp)) + jobresults.append({'jobname': jobname, + 'next_run_datetime': datetime.datetime.utcfromtimestamp(jobtime), + 'prev_run_datetime': prev_run_time_utc, + 'next_run_timestamp': jobtime, + 'prev_run_timestamp': prev_run_timestamp, + 'status': newstatus}) + + if not write: + #logger.info('jobresults: %s' % jobresults) + return jobresults + else: + if job is None: + for x in jobresults: + updateCtrl = {'JobName': x['jobname']} + updateVals = {'next_run_timestamp': x['next_run_timestamp'], + 'prev_run_timestamp': x['prev_run_timestamp'], + 'next_run_datetime': x['next_run_datetime'], + 'prev_run_datetime': x['prev_run_datetime'], + 'status': x['status']} + + myDB.upsert('jobhistory', updateVals, updateCtrl) + else: + #logger.fdebug('Updating info - job: %s' % job) + #logger.fdebug('Updating info - last run: %s' % last_run_completed) + #logger.fdebug('Updating info - status: %s' % status) + updateCtrl = {'JobName': job} + if current_run is not None: + updateVals = {'prev_run_timestamp': current_run, + 'prev_run_datetime': datetime.datetime.utcfromtimestamp(current_run), + 'status': status} + #logger.info('updateVals: %s' % updateVals) + elif last_run_completed is not None: + if job == 'DB Updater': + mylar.SCHED.reschedule_job('dbupdater', trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC')) + nextrun_stamp = utctimestamp() + (5 * 60) + elif job == 'Auto-Search': + mylar.SCHED.reschedule_job('search', trigger=IntervalTrigger(hours=0, minutes=mylar.SEARCH_INTERVAL, timezone='UTC')) + nextrun_stamp = utctimestamp() + (mylar.SEARCH_INTERVAL * 60) + elif job == 'RSS Feeds': + mylar.SCHED.reschedule_job('rss', trigger=IntervalTrigger(hours=0, minutes=int(mylar.RSS_CHECKINTERVAL), timezone='UTC')) + nextrun_stamp = utctimestamp() + (int(mylar.RSS_CHECKINTERVAL) * 60) + elif job == 
'Weekly Pullist':
+ if mylar.ALT_PULL == 2:
+ wkt = 4
+ else:
+ wkt = 24
+ mylar.SCHED.reschedule_job('weekly', trigger=IntervalTrigger(hours=wkt, minutes=0, timezone='UTC'))
+ nextrun_stamp = utctimestamp() + (wkt * 60 * 60)
+ elif job == 'Check Version':
+ mylar.SCHED.reschedule_job('version', trigger=IntervalTrigger(hours=0, minutes=mylar.CHECK_GITHUB_INTERVAL, timezone='UTC'))
+ nextrun_stamp = utctimestamp() + (mylar.CHECK_GITHUB_INTERVAL * 60)
+ elif job == 'Folder Monitor':
+ mylar.SCHED.reschedule_job('monitor', trigger=IntervalTrigger(hours=0, minutes=int(mylar.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
+ nextrun_stamp = utctimestamp() + (int(mylar.DOWNLOAD_SCAN_INTERVAL) * 60)
+
+ nextrun_date = datetime.datetime.utcfromtimestamp(nextrun_stamp)
+ logger.fdebug('ReScheduled job: %s to %s' % (job, nextrun_date))
+ #if it's completed, then update the last run time to the ending time of the job
+ updateVals = {'prev_run_timestamp': last_run_completed,
+ 'prev_run_datetime': datetime.datetime.utcfromtimestamp(last_run_completed),
+ 'last_run_completed': 'True',
+ 'next_run_timestamp': nextrun_stamp,
+ 'next_run_datetime': nextrun_date,
+ 'status': status}
+
+ #logger.fdebug('Job update for %s: %s' % (updateCtrl, updateVals))
+ myDB.upsert('jobhistory', updateVals, updateCtrl)
 def file_ops(path,dst,arc=False,one_off=False):
 # # path = source path + filename
diff --git a/mylar/rsscheck.py b/mylar/rsscheck.py
index 66110859..f21383dd 100755
--- a/mylar/rsscheck.py
+++ b/mylar/rsscheck.py
@@ -831,7 +831,7 @@ def nzbdbsearch(seriesname, issue, comicid=None, nzbprov=None, searchYear=None, nzbinfo['entries'] = nzbtheinfo return nzbinfo
-def torsend2client(seriesname, issue, seriesyear, linkit, site):
+def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None):
 logger.info('matched on ' + seriesname)
 filename = helpers.filesafe(seriesname)
 filename = re.sub(' ', '_', filename)
@@ -1119,8 +1119,8 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): try: dc = deluge.TorrentClient() if not dc.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD):
- return "fail"
 logger.info('Not connected to Deluge!')
+ return "fail"
 else:
 logger.info('Connected to Deluge! Will try to add torrent now!')
 torrent_info = dc.load_torrent(filepath)
@@ -1159,8 +1159,11 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): elif mylar.USE_WATCHDIR: if mylar.TORRENT_LOCAL:
- #get the hash so it doesn't mess up...
- torrent_info = helpers.get_the_hash(filepath)
+ if site == 'TPSE':
+ torrent_info = {'hash': pubhash}
+ else:
+ #get the hash so it doesn't mess up...
+ torrent_info = helpers.get_the_hash(filepath)
 torrent_info['clientmode'] = 'watchdir'
 torrent_info['link'] = linkit
 torrent_info['filepath'] = filepath
diff --git a/mylar/rsscheckit.py b/mylar/rsscheckit.py
index 28ad0ef9..94580be5 100755
--- a/mylar/rsscheckit.py
+++ b/mylar/rsscheckit.py
@@ -24,54 +24,51 @@ rss_lock = threading.Lock() class tehMain():
- def __init__(self, forcerss=None):
-
- self.forcerss = forcerss
-
- def run(self):
+ def __init__(self):
+ pass
+ def run(self, forcerss=None):
+ logger.info('forcerss is : %s' % forcerss)
 with rss_lock:
- logger.info('RSS Feed Check was last run at : ' + str(mylar.RSS_LASTRUN))
+ logger.info('[RSS-FEEDS] RSS Feed Check was last run at : ' + str(mylar.SCHED_RSS_LAST))
 firstrun = "no"
 #check the last run of rss to make sure it's not hammering.
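# --- illustrative aside (not part of the patch) -----------------------------
# The rewritten throttle below compares UTC epoch seconds instead of
# re-parsing a formatted local timestamp. A minimal standalone sketch of the
# same check, assuming helpers.utctimestamp() returns the current UTC time as
# epoch seconds (rss_check_due is an illustrative name, not a Mylar function):

import calendar
import datetime

def utctimestamp():
    # current UTC time as whole epoch seconds
    return float(calendar.timegm(datetime.datetime.utcnow().utctimetuple()))

def rss_check_due(last_run_stamp, interval_minutes):
    # a first run (no recorded stamp) is always due
    if last_run_stamp is None:
        return True
    minutes_since = abs(utctimestamp() - float(last_run_stamp)) / 60
    return minutes_since >= int(interval_minutes)
# -----------------------------------------------------------------------------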
- if mylar.RSS_LASTRUN is None or mylar.RSS_LASTRUN == '' or mylar.RSS_LASTRUN == '0' or self.forcerss == True:
- logger.info('RSS Feed Check First Ever Run.')
+ if mylar.SCHED_RSS_LAST is None or mylar.SCHED_RSS_LAST == '' or mylar.SCHED_RSS_LAST == '0' or forcerss == True:
+ logger.info('[RSS-FEEDS] RSS Feed Check Initializing...')
 firstrun = "yes"
- mins = 0
+ duration_diff = 0
 else:
- c_obj_date = datetime.datetime.strptime(mylar.RSS_LASTRUN, "%Y-%m-%d %H:%M:%S")
- n_date = datetime.datetime.now()
- absdiff = abs(n_date - c_obj_date)
- mins = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 60.0 #3600 is for hours.
-
- if firstrun == "no" and mins < int(mylar.RSS_CHECKINTERVAL):
- logger.fdebug('RSS Check has taken place less than the threshold - not initiating at this time.')
+ tstamp = float(mylar.SCHED_RSS_LAST)
+ duration_diff = abs(helpers.utctimestamp() - tstamp)/60
+ logger.fdebug('[RSS-FEEDS] Duration diff: %s' % duration_diff)
+ if firstrun == "no" and duration_diff < int(mylar.RSS_CHECKINTERVAL):
+ logger.fdebug('[RSS-FEEDS] RSS Check has taken place less than the threshold - not initiating at this time.')
 return
- mylar.RSS_LASTRUN = helpers.now()
- logger.fdebug('Updating RSS Run time to : ' + str(mylar.RSS_LASTRUN))
- mylar.config_write()
+ helpers.job_management(write=True, job='RSS Feeds', current_run=helpers.utctimestamp(), status='Running')
+ mylar.RSS_STATUS = 'Running'
+ logger.fdebug('[RSS-FEEDS] Updated RSS Run time to : ' + str(mylar.SCHED_RSS_LAST))
 #function for looping through nzbs/torrent feeds
 if mylar.ENABLE_TORRENT_SEARCH:
- logger.info('[RSS] Initiating Torrent RSS Check.')
+ logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
 if mylar.ENABLE_TPSE:
- logger.info('[RSS] Initiating Torrent RSS Feed Check on TorrentProject.')
+ logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on TorrentProject.')
 #rsscheck.torrents(pickfeed='3') #TP.SE RSS Check (has to be page-parsed)
 rsscheck.torrents(pickfeed='TPSE') #TPSE = DEM RSS Check + WWT RSS Check
 if mylar.ENABLE_32P:
- logger.info('[RSS] Initiating Torrent RSS Feed Check on 32P.')
+ logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.')
 if mylar.MODE_32P == 0:
- logger.fdebug('[RSS] 32P mode set to Legacy mode. Monitoring New Releases feed only.')
+ logger.fdebug('[RSS-FEEDS] 32P mode set to Legacy mode. Monitoring New Releases feed only.')
 if any([mylar.PASSKEY_32P is None, mylar.PASSKEY_32P == '', mylar.RSSFEED_32P is None, mylar.RSSFEED_32P == '']):
- logger.error('[RSS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.')
+ logger.error('[RSS-FEEDS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.')
 else:
 rsscheck.torrents(pickfeed='1', feedinfo=mylar.KEYS_32P)
 else:
- logger.fdebug('[RSS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed')
+ logger.fdebug('[RSS-FEEDS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed')
 if any([mylar.USERNAME_32P is None, mylar.USERNAME_32P == '', mylar.PASSWORD_32P is None]):
- logger.error('[RSS] Unable to sign-on to 32P to validate settings. Please enter/check your username password in the configuration.')
+ logger.error('[RSS-FEEDS] Unable to sign-on to 32P to validate settings. Please enter/check your username/password in the configuration.')
 else:
 if mylar.KEYS_32P is None:
 feed32p = auth32p.info32p()
@@ -83,7 +80,7 @@ class tehMain(): feedinfo = mylar.FEEDINFO_32P if feedinfo is None or len(feedinfo) == 0 or feedinfo == "disable":
- logger.error('[RSS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.')
+ logger.error('[RSS-FEEDS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.')
 else:
 rsscheck.torrents(pickfeed='1', feedinfo=feedinfo[0])
 x = 0
@@ -93,12 +90,14 @@ class tehMain(): pfeed_32p = str(7 + x) rsscheck.torrents(pickfeed=pfeed_32p, feedinfo=fi)
- logger.info('[RSS] Initiating RSS Feed Check for NZB Providers.')
- rsscheck.nzbs(forcerss=self.forcerss)
- logger.info('[RSS] RSS Feed Check/Update Complete')
- logger.info('[RSS] Watchlist Check for new Releases')
+ logger.info('[RSS-FEEDS] Initiating RSS Feed Check for NZB Providers.')
+ rsscheck.nzbs(forcerss=forcerss)
+ logger.info('[RSS-FEEDS] RSS Feed Check/Update Complete')
+ logger.info('[RSS-FEEDS] Watchlist Check for new Releases')
 mylar.search.searchforissue(rsscheck='yes')
- logger.info('[RSS] Watchlist Check complete.')
- if self.forcerss:
- logger.info('[RSS] Successfully ran a forced RSS Check.')
- return
+ logger.info('[RSS-FEEDS] Watchlist Check complete.')
+ if forcerss:
+ logger.info('[RSS-FEEDS] Successfully ran a forced RSS Check.')
+ helpers.job_management(write=True, job='RSS Feeds', last_run_completed=helpers.utctimestamp(), status='Waiting')
+ mylar.RSS_STATUS = 'Waiting'
+ return True
diff --git a/mylar/search.py b/mylar/search.py
index 6cf641fe..6894ccd0 100755
--- a/mylar/search.py
+++ b/mylar/search.py
@@ -2263,7 +2263,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc logger.fdebug("link:" + link) logger.fdebug("Torrent Provider:" + nzbprov)
- rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov)
+ rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov, nzbid) #nzbid = hash for usage with public torrents
 if rcheck == "fail":
 if mylar.FAILED_DOWNLOAD_HANDLING:
 logger.error('Unable to send torrent to client.
Assuming incomplete link - sending to Failed Handler and continuing search.') diff --git a/mylar/searchit.py b/mylar/searchit.py index eb49cf61..60d1b294 100755 --- a/mylar/searchit.py +++ b/mylar/searchit.py @@ -17,7 +17,7 @@ from __future__ import with_statement import mylar -from mylar import logger +from mylar import logger, helpers class CurrentSearcher(): def __init__(self, **kwargs): @@ -26,4 +26,9 @@ class CurrentSearcher(): def run(self): logger.info('[SEARCH] Running Search for Wanted.') + helpers.job_management(write=True, job='Auto-Search', current_run=helpers.utctimestamp(), status='Running') + mylar.SEARCH_STATUS = 'Running' mylar.search.searchforissue() + helpers.job_management(write=True, job='Auto-Search', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.SEARCH_STATUS = 'Waiting' + #mylar.SCHED_SEARCH_LAST = helpers.now() diff --git a/mylar/torrent/clients/deluge.py b/mylar/torrent/clients/deluge.py index 2c7a8187..51f65b3e 100644 --- a/mylar/torrent/clients/deluge.py +++ b/mylar/torrent/clients/deluge.py @@ -146,7 +146,7 @@ class TorrentClient(object): else: logger.info('Torrent successfully added!') return {'hash': torrent_info['hash'], - 'label': torrent_info['label'], + 'label': mylar.DELUGE_LABEL, 'folder': torrent_info['save_path'], 'total_filesize': torrent_info['total_size'], 'name': torrent_info['name'], diff --git a/mylar/torrent/clients/rtorrent.py b/mylar/torrent/clients/rtorrent.py index a5de44fc..7f788703 100755 --- a/mylar/torrent/clients/rtorrent.py +++ b/mylar/torrent/clients/rtorrent.py @@ -45,18 +45,26 @@ class TorrentClient(object): logger.info(url) if username and password: + logger.info('username: %s / password: %s' % (username, 'redacted')) try: self.conn = RTorrent( url,(auth, username, password), verify_server=True, verify_ssl=self.getVerifySsl() ) - except: + except Exception as err: + logger.error('Failed to connect to rTorrent: %s', err) return False else: + logger.info('NO username %s / NO password %s' % (username, password)) try: - self.conn = RTorrent(host) - except: + self.conn = RTorrent( + url, (auth, username, password), + verify_server=True, + verify_ssl=self.getVerifySsl() + ) + except Exception as err: + logger.error('Failed to connect to rTorrent: %s', err) return False return self.conn diff --git a/mylar/updater.py b/mylar/updater.py index afd9b1cd..3184cd13 100755 --- a/mylar/updater.py +++ b/mylar/updater.py @@ -17,6 +17,7 @@ import datetime from xml.dom.minidom import parseString import urllib2 import shlex +import operator import re import os import itertools @@ -24,17 +25,16 @@ import itertools import mylar from mylar import db, logger, helpers, filechecker -def dbUpdate(ComicIDList=None, calledfrom=None): +def dbUpdate(ComicIDList=None, calledfrom=None, sched=False): if mylar.IMPORTLOCK: logger.info('Import is currently running - deferring this until the next scheduled run sequence.') return myDB = db.DBConnection() - #print "comicidlist:" + str(ComicIDList) if ComicIDList is None: if mylar.UPDATE_ENDED: logger.info('Updating only Continuing Series (option enabled) - this might cause problems with the pull-list matching for rebooted series') comiclist = [] - completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LatestDate DESC, LastUpdated ASC') + completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, 
LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, LatestDate ASC') for comlist in completelist: if comlist['LatestDate'] is None: recentstatus = 'Loading' @@ -66,19 +66,22 @@ def dbUpdate(ComicIDList=None, calledfrom=None): "Corrected_SeriesYear": comlist['Corrected_SeriesYear']}) else: - comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE Status="Active" or Status="Loading" order by LatestDate DESC, LastUpdated ASC') + comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, latestDate ASC') else: comiclist = [] comiclisting = ComicIDList for cl in comiclisting: - comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE ComicID=?', [cl]) + comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear, LastUpdated from comics WHERE ComicID=? order by LastUpdated DESC, LatestDate ASC', [cl]) - if calledfrom is None: + if all([sched is False, calledfrom is None]): logger.info('Starting update for %i active comics' % len(comiclist)) cnt = 1 - for comic in comiclist: + for comic in sorted(comiclist, key=operator.itemgetter('LastUpdated'), reverse=True): + if sched is True: + # since this runs every 5 minutes, take the 1st entry only... + logger.info('[UPDATER] Starting update for %s [%s] - last updated: %s' % (comiclist[0]['ComicName'], comiclist[0]['ComicYear'], comiclist[0]['LastUpdated'])) dspyear = comic['ComicYear'] csyear = None @@ -100,14 +103,14 @@ def dbUpdate(ComicIDList=None, calledfrom=None): absdiff = abs(n_date - c_obj_date) hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0 if hours < 5: - logger.info(ComicName + '[' + str(ComicID) + '] Was refreshed less than 5 hours ago. Skipping Refresh at this time.') + logger.fdebug(ComicName + '[' + str(ComicID) + '] Was refreshed less than 5 hours ago. 
Skipping Refresh at this time.') cnt +=1 continue logger.info('[' + str(cnt) + '/' + str(len(comiclist)) + '] Refreshing :' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']') else: ComicID = comic['ComicID'] ComicName = comic['ComicName'] - + logger.fdebug('Refreshing: ' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']') mismatch = "no" @@ -124,6 +127,9 @@ def dbUpdate(ComicIDList=None, calledfrom=None): cchk = importer.addComictoDB(ComicID, mismatch) else: if mylar.CV_ONETIMER == 1: + if sched is True: + helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running') + mylar.UPDATER_STATUS = 'Running' logger.fdebug("CV_OneTimer option enabled...") #in order to update to JUST CV_ONLY, we need to delete the issues for a given series so it's a clea$ logger.fdebug("Gathering the status of all issues for the series.") @@ -289,7 +295,12 @@ def dbUpdate(ComicIDList=None, calledfrom=None): cchk = mylar.importer.addComictoDB(ComicID, mismatch) cnt +=1 - time.sleep(15) #pause for 15 secs so dont hammer CV and get 500 error + if sched is False: + time.sleep(15) #pause for 15 secs so dont hammer CV and get 500 error + else: + helpers.job_management(write=True, job='DB Updater', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.UPDATER_STATUS = 'Waiting' + break logger.info('Update complete') @@ -363,7 +374,6 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, return else: issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [ComicID, IssueNumber]).fetchone() - if issuechk is None and altissuenumber is not None: logger.info('altissuenumber is : ' + str(altissuenumber)) issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [ComicID, helpers.issuedigits(altissuenumber)]).fetchone() @@ -396,17 +406,16 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, cchk = mylar.importer.updateissuedata(ComicID, ComicName, calledfrom='weeklycheck')#mylar.importer.addComictoDB(ComicID,mismatch,pullupd) else: logger.fdebug('It has not been longer than 5 hours since we last did this...we will wait so we do not hammer things.') - else: logger.fdebug('[WEEKLY-PULL] Walksoftly has been enabled. ComicID/IssueID control given to the ninja to monitor.') - #logger.fdebug('hours: ' + str(hours) + ' -- forcecheck: ' + str(forcecheck)) + logger.fdebug('hours: ' + str(hours) + ' -- forcecheck: ' + str(forcecheck)) if hours > 2 or forcecheck == 'yes': logger.fdebug('weekinfo:' + str(weekinfo)) mylar.PULL_REFRESH = datetime.datetime.today() #update the PULL_REFRESH mylar.config_write() chkitout = mylar.locg.locg(weeknumber=str(weekinfo['weeknumber']),year=str(weekinfo['year'])) - + logger.fdebug('linking ComicID to Pull-list to reflect status.') downstats = {"ComicID": ComicID, "IssueID": None, @@ -424,7 +433,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, if issuechk['Issue_Number'] == IssueNumber or issuechk['Issue_Number'] == altissuenumber: og_status = issuechk['Status'] #check for 'out-of-whack' series here. 
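# --- illustrative aside (not part of the patch) -----------------------------
# The scheduler-driven jobs in this changeset (DB Updater above, Auto-Search,
# Check Version and Weekly Pullist below) all bracket their work the same
# way: stamp 'Running' with current_run on entry, stamp 'Waiting' with
# last_run_completed on exit, and mirror the status into a module-level flag.
# A condensed sketch of that pattern; run_bracketed/do_work are illustrative
# names, while job_management() and utctimestamp() are the helpers this patch
# adds (mylar/helpers importable as in the project):

import mylar
from mylar import helpers

def run_bracketed(jobname, status_attr, do_work):
    helpers.job_management(write=True, job=jobname,
                           current_run=helpers.utctimestamp(), status='Running')
    setattr(mylar, status_attr, 'Running')
    try:
        do_work()
    finally:
        # the patch performs these two steps inline; a try/finally variant
        # simply keeps the status from sticking on 'Running' if do_work raises
        helpers.job_management(write=True, job=jobname,
                               last_run_completed=helpers.utctimestamp(), status='Waiting')
        setattr(mylar, status_attr, 'Waiting')

# e.g. run_bracketed('Check Version', 'VERSION_STATUS', versioncheck.checkGithub)
# -----------------------------------------------------------------------------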
- whackness = dbUpdate([ComicID], calledfrom='weekly')
+ whackness = dbUpdate([ComicID], calledfrom='weekly', sched=False)
 if whackness == True:
 if any([issuechk['Status'] == 'Downloaded', issuechk['Status'] == 'Archived', issuechk['Status'] == 'Snatched']):
 logger.fdebug('Forcibly maintaining status of : ' + og_status + ' for #' + issuechk['Issue_Number'] + ' to ensure integrity.')
@@ -473,7 +482,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, newValue['Status'] = "Skipped" #was in wrong place :( else:
- logger.fdebug('Issues do not match for some reason...weekly new issue: ' + str(IssueNumber))
+ logger.fdebug('Issues do not match for some reason...weekly new issue: %s' % IssueNumber)
 return
 if mylar.AUTOWANT_UPCOMING:
diff --git a/mylar/versioncheckit.py b/mylar/versioncheckit.py
index 8ae19053..eae76e66 100644
--- a/mylar/versioncheckit.py
+++ b/mylar/versioncheckit.py
@@ -17,14 +17,18 @@ from __future__ import with_statement import mylar
-from mylar import logger
+from mylar import logger, helpers, versioncheck
-#import threading
 class CheckVersion():
 def __init__(self):
 pass
 def run(self):
 logger.info('[VersionCheck] Checking for new release on Github.')
- mylar.versioncheck.checkGithub()
+ helpers.job_management(write=True, job='Check Version', current_run=helpers.utctimestamp(), status='Running')
+ mylar.VERSION_STATUS = 'Running'
+ versioncheck.checkGithub()
+ helpers.job_management(write=True, job='Check Version', last_run_completed=helpers.utctimestamp(), status='Waiting')
+ mylar.VERSION_STATUS = 'Waiting'
+ logger.info('[VersionCheck] Version check complete.')
 return
diff --git a/mylar/webserve.py b/mylar/webserve.py
index ddaf069c..eda35987 100644
--- a/mylar/webserve.py
+++ b/mylar/webserve.py
@@ -37,7 +37,7 @@ import shutil import mylar
-from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers #,rsscheck
+from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers
 import simplejson as simplejson
@@ -1033,9 +1033,8 @@ class WebInterface(object): #forcerss = True #threading.Thread(target=mylar.rsscheck.tehMain, args=[True]).start() #this is for use with the new scheduler not in place yet.
- forcethis = mylar.rsscheckit.tehMain(forcerss=True)
- threading.Thread(target=forcethis.run).start()
- return
+ forcethis = mylar.rsscheckit.tehMain()
+ threading.Thread(target=forcethis.run, args=[True]).start()
 force_rss.exposed = True
 def markannuals(self, ann_action=None, **args):
@@ -1234,7 +1233,6 @@ class WebInterface(object): else: if mylar.ENABLE_SNATCH_SCRIPT: #packs not supported on retry atm - Volume and Issuedate also not included due to limitations... 
- snatch_vars = {'comicinfo': {'comicname': ComicName, 'issuenumber': IssueNumber, 'seriesyear': ComicYear, @@ -1257,9 +1255,9 @@ class WebInterface(object): logger.info('Successfully retried issue.') break else: - ckthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN annuals as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() - if ckthis is None: - ckthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.Volume, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() + chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.Issue_Number, b.IssueDate FROM comics as a INNER JOIN annuals as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() + if chkthis is None: + chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.Issue_Number, b.IssueDate FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() modcomicname = chkthis['ComicName'] else: modcomicname = chkthis['ComicName'] + ' Annual' @@ -1267,10 +1265,12 @@ class WebInterface(object): comicinfo = [] comicinfo.append({"ComicName": chkthis['ComicName'], "ComicVolume": chkthis['ComicVersion'], - "IssueNumber": chkthis['IssueNumber'], + "IssueNumber": chkthis['Issue_Number'], "comyear": chkthis['ComicYear'], "IssueDate": chkthis['IssueDate'], - "modcomicname": modcomicname}) + "pack": False, + "modcomicname": modcomicname, + "oneoff": False}) newznabinfo = None @@ -1518,7 +1518,7 @@ class WebInterface(object): myDB.upsert("annuals", newValueDict, controlValueDict) else: myDB.upsert("issues", newValueDict, controlValueDict) - raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) + #cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) else: #if ComicName is not None, then it's from the FuturePull list that we're 'unwanting' an issue. #ComicID may be present if it's a watch from the Watchlist, otherwise it won't exist. @@ -1610,17 +1610,18 @@ class WebInterface(object): return pullSearch.exposed = True - def pullist(self, week=None, year=None): + def pullist(self, week=None, year=None, generateonly=False): myDB = db.DBConnection() - autowants = myDB.select("SELECT * FROM futureupcoming WHERE Status='Wanted'") autowant = [] - if autowants: - for aw in autowants: - autowant.append({"ComicName": aw['ComicName'], - "IssueNumber": aw['IssueNumber'], - "Publisher": aw['Publisher'], - "Status": aw['Status'], - "DisplayComicName": aw['DisplayComicName']}) + if generateonly is False: + autowants = myDB.select("SELECT * FROM futureupcoming WHERE Status='Wanted'") + if autowants: + for aw in autowants: + autowant.append({"ComicName": aw['ComicName'], + "IssueNumber": aw['IssueNumber'], + "Publisher": aw['Publisher'], + "Status": aw['Status'], + "DisplayComicName": aw['DisplayComicName']}) weeklyresults = [] wantedcount = 0 @@ -1644,7 +1645,7 @@ class WebInterface(object): else: logger.warn('Unable to populate the pull-list. 
Not continuing at this time (will try again in abit)') - if w_results is None: + if all([w_results is None, generateonly is False]): return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=0, weekinfo=weekinfo) watchlibrary = helpers.listLibrary() @@ -1676,8 +1677,11 @@ class WebInterface(object): haveit = "No" linkit = None - if all([weekly['ComicID'] is not None, weekly['ComicID'] != '']) and haveit == 'No': + if all([weekly['ComicID'] is not None, weekly['ComicID'] != '', haveit == 'No']) or haveit == 'OneOff': linkit = 'http://comicvine.gamespot.com/volume/4050-' + str(weekly['ComicID']) + else: + #setting it here will force it to set the link to the right comicid regardless of annuals or not + linkit = haveit x = None try: @@ -1731,10 +1735,14 @@ class WebInterface(object): weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False) else: self.manualpull() - if week: - return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + + if generateonly is True: + return weeklyresults, weekinfo else: - return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + if week: + return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + else: + return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) pullist.exposed = True def removeautowant(self, comicname, release): @@ -2113,55 +2121,81 @@ class WebInterface(object): annualDelete.exposed = True + def previewRename(self, comicidlist): + myDB = db.DBConnection() + resultlist = [] + for comicid in comicidlist: + comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone() + comicdir = comic['ComicLocation'] + comicname = comic['ComicName'] + issue = myDB.selectone("SELECT * FROM issues WHERE ComicID=? 
AND Location IS NOT NULL ORDER BY ReleaseDate", [comicid]).fetchone()
+ if issue is None:
+ #no issue with a file on disk yet for this series - nothing to preview
+ continue
+ if 'annual' in issue['Location'].lower():
+ annualize = 'yes'
+ else:
+ annualize = None
+ renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], issue['Location'], comicyear=None, issueid=issue['IssueID'], annualize=annualize)
+ resultlist.append({'original': issue['Location'],
+ 'new': renameiss['nfilename']})
+
+ #assumed completion: hand the collected previews back to the caller as JSON
+ return simplejson.dumps(resultlist)
+
+ previewRename.exposed = True
+
 def manualRename(self, comicid):
 if mylar.FILE_FORMAT == '':
 logger.error("You haven't specified a File Format in Configuration/Advanced")
 logger.error("Cannot rename files.")
 return
+ if isinstance(comicid, list):
+ comiclist = comicid
+ else:
+ comiclist = []
+ comiclist.append(comicid)
 myDB = db.DBConnection()
- comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
- comicdir = comic['ComicLocation']
- comicname = comic['ComicName']
- extensions = ('.cbr', '.cbz', '.cb7')
- issues = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comicid])
- if mylar.ANNUALS_ON:
- issues += myDB.select("SELECT * FROM annuals WHERE ComicID=?", [comicid])
- comfiles = []
 filefind = 0
- if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) != comicdir:
- logger.fdebug('multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS)
- logger.fdebug('dir: ' + comicdir)
- logger.fdebug('os.path.basename: ' + os.path.basename(comicdir))
- pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir))
+ for comicid in comiclist:
+ comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
+ comicdir = comic['ComicLocation']
+ comicname = comic['ComicName']
+ extensions = ('.cbr', '.cbz', '.cb7')
+ issues = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comicid])
+ if mylar.ANNUALS_ON:
+ issues += myDB.select("SELECT * FROM annuals WHERE ComicID=?", [comicid])
+ if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) != comicdir:
+ logger.fdebug('multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS)
+ logger.fdebug('dir: ' + comicdir)
+ logger.fdebug('os.path.basename: ' + os.path.basename(comicdir))
+ pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir))
- for root, dirnames, filenames in os.walk(comicdir):
- for filename in filenames:
- if filename.lower().endswith(extensions):
- #logger.info("filename being checked is : " + str(filename))
- for issue in issues:
- if issue['Location'] == filename:
- #logger.error("matched " + str(filename) + " to DB file " + str(issue['Location']))
- if 'annual' in issue['Location'].lower():
- annualize = 'yes'
- else:
- annualize = None
- renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=issue['IssueID'], annualize=annualize)
- nfilename = renameiss['nfilename']
- srciss = os.path.join(comicdir, filename)
- if filename != nfilename:
- logger.info('Renaming ' + filename + ' ... to ... ' + renameiss['nfilename'])
- try:
- shutil.move(srciss, renameiss['destination_dir'])
- except (OSError, IOError):
- logger.error('Failed to move files - check directories and manually re-run.')
- return
- filefind+=1
- else:
- logger.info('Not renaming ' + filename + ' as it is in desired format already.')
- #continue
+ for root, dirnames, filenames in os.walk(comicdir):
+ for filename in filenames:
+ if filename.lower().endswith(extensions):
+ #logger.info("filename being checked is : " + str(filename))
+ for issue in issues:
+ if issue['Location'] == filename:
+ #logger.error("matched " + str(filename) + " to DB file " + str(issue['Location']))
+ if 'annual' in issue['Location'].lower():
+ annualize = 'yes'
+ else:
+ annualize = None
+ renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=issue['IssueID'], annualize=annualize)
+ nfilename = renameiss['nfilename']
+ srciss = os.path.join(comicdir, filename)
+ if filename != nfilename:
+ logger.info('Renaming ' + filename + ' ... to ... ' + renameiss['nfilename'])
+ try:
+ shutil.move(srciss, renameiss['destination_dir'])
+ except (OSError, IOError):
+ logger.error('Failed to move files - check directories and manually re-run.')
+ return
+ filefind+=1
+ else:
+ logger.info('Not renaming ' + filename + ' as it is in desired format already.')
+ #continue
 logger.info('I have renamed ' + str(filefind) + ' issues of ' + comicname)
 updater.forceRescan(comicid)
+ if len(comiclist) > 1:
+ logger.info('[RENAMER] %s series have been renamed.' % len(comiclist))
 manualRename.exposed = True
 def searchScan(self, name):
@@ -2170,9 +2204,108 @@ class WebInterface(object): def manage(self): mylarRoot = mylar.DESTINATION_DIR
- return serve_template(templatename="manage.html", title="Manage", mylarRoot=mylarRoot)
+ import db
+ myDB = db.DBConnection()
+ jobresults = myDB.select('SELECT * FROM jobhistory')
+ if jobresults is not None:
+ tmp = []
+ for jb in jobresults:
+ if jb['prev_run_datetime'] is not None:
+ try:
+ pr = (datetime.datetime.strptime(jb['prev_run_datetime'][:19], '%Y-%m-%d %H:%M:%S') - datetime.datetime.utcfromtimestamp(0)).total_seconds()
+ except ValueError:
+ pr = (datetime.datetime.strptime(jb['prev_run_datetime'], '%Y-%m-%d %H:%M:%S.%f') - datetime.datetime.utcfromtimestamp(0)).total_seconds()
+ prev_run = datetime.datetime.fromtimestamp(pr)
+ else:
+ prev_run = None
+ if jb['next_run_datetime'] is not None:
+ try:
+ nr = (datetime.datetime.strptime(jb['next_run_datetime'][:19], '%Y-%m-%d %H:%M:%S') - datetime.datetime.utcfromtimestamp(0)).total_seconds()
+ except ValueError:
+ nr = (datetime.datetime.strptime(jb['next_run_datetime'], '%Y-%m-%d %H:%M:%S.%f') - datetime.datetime.utcfromtimestamp(0)).total_seconds()
+ next_run = datetime.datetime.fromtimestamp(nr)
+ else:
+ next_run = None
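# --- illustrative aside (not part of the patch) -----------------------------
# manage() above converts the stored UTC datetime strings back into local
# times by round-tripping through epoch seconds: parse the naive UTC string,
# subtract the epoch, then let fromtimestamp() re-anchor the value in the
# server's local zone. A condensed sketch (utc_string_to_local is an
# illustrative name):

import datetime

def utc_string_to_local(dt_string):
    # dt_string like '2017-06-01 14:05:33' (naive UTC, seconds precision)
    utc = datetime.datetime.strptime(dt_string[:19], '%Y-%m-%d %H:%M:%S')
    epoch_seconds = (utc - datetime.datetime.utcfromtimestamp(0)).total_seconds()
    return datetime.datetime.fromtimestamp(epoch_seconds)  # local wall time
# -----------------------------------------------------------------------------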
+ if 'rss' in jb['JobName'].lower():
+ status = mylar.RSS_STATUS
+ interval = str(mylar.RSS_CHECKINTERVAL) + ' mins'
+ if 'weekly' in jb['JobName'].lower():
+ status = mylar.WEEKLY_STATUS
+ if mylar.ALT_PULL == 2: interval = '4 hrs'
+ else: interval = '24 hrs'
+ if 'search' in jb['JobName'].lower():
+ status = mylar.SEARCH_STATUS
+ interval = str(mylar.SEARCH_INTERVAL) + ' mins'
+ if 'updater' in jb['JobName'].lower():
+ status = mylar.UPDATER_STATUS
+ interval = '5 mins'
+ if 'folder' in jb['JobName'].lower():
+ status = mylar.MONITOR_STATUS
+ interval = str(mylar.DOWNLOAD_SCAN_INTERVAL) + ' mins'
+ if 'version' in jb['JobName'].lower():
+ status = mylar.VERSION_STATUS
+ interval = str(mylar.CHECK_GITHUB_INTERVAL) + ' mins'
+
+ tmp.append({'prev_run_datetime': prev_run,
+ 'next_run_datetime': next_run,
+ 'interval': interval,
+ 'jobname': jb['JobName'],
+ 'status': status})
+ jobresults = tmp
+ return serve_template(templatename="manage.html", title="Manage", mylarRoot=mylarRoot, jobs=jobresults)
 manage.exposed = True

+ def jobmanage(self, job, mode):
+ logger.info('%s : %s' % (job, mode))
+ job_id_map = {'DB Updater': 'dbupdater', 'Auto-Search': 'search', 'RSS Feeds': 'rss', 'Weekly Pullist': 'weekly', 'Check Version': 'version', 'Folder Monitor': 'monitor'}
+ jobid = job_id_map.get(job)
+ logger.info('jobid: %s' % jobid)
+ if jobid is not None:
+ myDB = db.DBConnection()
+ if mode == 'pause':
+ mylar.SCHED.pause_job(jobid)
+ logger.info('[%s] Paused scheduled runtime.' % job)
+ ctrl = {'JobName': job}
+ val = {'Status': 'Paused'}
+ myDB.upsert('jobhistory', val, ctrl)
+ elif mode == 'resume':
+ mylar.SCHED.resume_job(jobid)
+ logger.info('[%s] Resumed scheduled runtime.' % job)
+ ctrl = {'JobName': job}
+ val = {'Status': 'Waiting'}
+ myDB.upsert('jobhistory', val, ctrl)
+
+ else:
+ logger.warn('%s cannot be matched against any scheduled jobs - maybe you should restart?' % job)
+
+ jobmanage.exposed = True
+
+ def schedulerForceCheck(self, jobid):
+ from apscheduler.triggers.date import DateTrigger
+ for jb in mylar.SCHED.get_jobs():
+ #logger.info('jb : %s' % jb)
+ if jobid.lower() in str(jb).lower():
+ logger.info('[%s] Now force submitting job.' % jb)
+ if jobid == 'rss':
+ #a forced RSS run passes forcerss=True through to tehMain.run
+ mylar.SCHED.add_job(func=jb.func, args=[True], trigger=DateTrigger(run_date=datetime.datetime.now()))
+ elif jobid in ('weekly', 'search', 'version', 'updater', 'monitor'):
+ mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now()))
+ break
+
+ schedulerForceCheck.exposed = True
+
 def manageComics(self):
 comics = helpers.havetotals()
 return serve_template(templatename="managecomics.html", title="Manage Comics", comics=comics)
@@ -2223,12 +2356,6 @@ class WebInterface(object): return serve_template(templatename="managefailed.html", title="Failed DB Management", failed=results) manageFailed.exposed = True
- def manageNew(self):
- myDB = db.DBConnection()
- newcomics = myDB.select('SELECT * from newartists')
- return serve_template(templatename="managenew.html", title="Manage New Artists", newcomics=newcomics)
- manageNew.exposed = True
-
 def flushImports(self):
 myDB = db.DBConnection()
 myDB.action('DELETE from importresults')
@@ -2345,6 +2472,9 @@ class WebInterface(object): elif action == 'metatag': logger.info('[MANAGE COMICS][MASS METATAGGING] Now Metatagging Files for ' + str(len(comicsToAdd)) + ' series') threading.Thread(target=self.forceRescan, args=[comicsToAdd,True,'metatag']).start()
+ elif action == 'rename':
+ logger.info('[MANAGE COMICS][MASS RENAMING] Now Renaming Files for ' + str(len(comicsToAdd)) + ' series')
+ threading.Thread(target=self.manualRename, args=[comicsToAdd]).start()
 else:
 logger.info('[MANAGE COMICS][REFRESH] Refreshing ' + str(len(comicsToAdd)) + ' series')
threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start()
@@ -3361,7 +3491,7 @@ class WebInterface(object): myDB = db.DBConnection() if type == 'all': logger.info(u"Clearing all history")
- myDB.action('DELETE from snatched')
+ myDB.action('DELETE from snatched')
 else:
 logger.info(u"Clearing history where status is %s" % type)
 myDB.action('DELETE from snatched WHERE Status=?', [type])
@@ -3517,6 +3647,9 @@ class WebInterface(object): #save the values so they stick. mylar.ADD_COMICS = autoadd
+ if 'windows' in mylar.OS_DETECT.lower():
+ #to handle long paths, let's append the '\\?\' to the path to allow for unicode windows api access
+ path = "\\\\?\\" + path
 mylar.COMIC_DIR = path
 mylar.IMP_MOVE = imp_move
 mylar.IMP_RENAME = imp_rename
@@ -4125,7 +4258,6 @@ class WebInterface(object): "COUNT_HAVES": COUNT_HAVES, "COUNT_ISSUES": COUNT_ISSUES, "COUNT_SIZE": COUNT_SIZE}
-
 config = { "comicvine_api": mylar.COMICVINE_API,
 "http_host": mylar.HTTP_HOST,
@@ -4224,6 +4356,7 @@ class WebInterface(object): "extra_newznabs": sorted(mylar.EXTRA_NEWZNABS, key=itemgetter(5), reverse=True), "enable_rss": helpers.checked(mylar.ENABLE_RSS), "rss_checkinterval": mylar.RSS_CHECKINTERVAL,
+ "rss_last": datetime.datetime.fromtimestamp(mylar.SCHED_RSS_LAST).replace(microsecond=0) if mylar.SCHED_RSS_LAST is not None else None,
 "provider_order": mylar.PROVIDER_ORDER,
 "enable_torrents": helpers.checked(mylar.ENABLE_TORRENTS),
 "minseeds": mylar.MINSEEDS,
@@ -5005,7 +5138,7 @@ class WebInterface(object): getComicArtwork.exposed = True def findsabAPI(self, sabhost=None, sabusername=None, sabpassword=None):
- import sabparse
+ from mylar import sabparse
 sabapi = sabparse.sabnzbd(sabhost, sabusername, sabpassword)
 logger.info('SAB NZBKey found as : ' + str(sabapi) + '. You still have to save the config to retain this setting.')
 mylar.SAB_APIKEY = sabapi
@@ -5112,7 +5245,7 @@ class WebInterface(object): IssueInfo.exposed = True
- def manual_metatag(self, dirName, issueid, filename, comicid, comversion, seriesyear=None):
+ def manual_metatag(self, dirName, issueid, filename, comicid, comversion, seriesyear=None, group=False):
 module = '[MANUAL META-TAGGING]'
 try:
 import cmtagmylar
@@ -5148,7 +5281,8 @@ class WebInterface(object): else: logger.fdebug('Failed to remove temporary directory: ' + cache_dir)
- updater.forceRescan(comicid)
+ if group is False:
+ updater.forceRescan(comicid)
 manual_metatag.exposed = True
@@ -5165,7 +5299,8 @@ class WebInterface(object): meta_dir = dirName for ginfo in groupinfo: #if multiple_dest_dirs is in effect, metadir will be pointing to the wrong location and cause a 'Unable to create temporary cache location' error message
- self.manual_metatag(meta_dir, ginfo['IssueID'], os.path.join(meta_dir, ginfo['Location']), ComicID, comversion=cinfo['ComicVersion'], seriesyear=cinfo['ComicYear'])
+ self.manual_metatag(meta_dir, ginfo['IssueID'], os.path.join(meta_dir, ginfo['Location']), ComicID, comversion=cinfo['ComicVersion'], seriesyear=cinfo['ComicYear'], group=True)
+ updater.forceRescan(ComicID)
 logger.info('[SERIES-METATAGGER][' + cinfo['ComicName'] + ' (' + cinfo['ComicYear'] + ')] Finished doing a complete series (re)tagging of metadata.')
 group_metatag.exposed = True
@@ -5343,9 +5478,98 @@ class WebInterface(object): import auth32p tmp = auth32p.info32p(test=True) rtnvalues = tmp.authenticate()
- if rtnvalues is True:
- return "Successfully Authenticated."
+ if rtnvalues['status'] is True: + return json.dumps({"status": "Successfully Authenticated.", "inkdrops": mylar.INKDROPS_32P}) else: - return "Could not Authenticate." + return json.dumps({"status": "Could not Authenticate.", "inkdrops": mylar.INKDROPS_32P}) test_32p.exposed = True + + def create_readlist(self, list=None, weeknumber=None, year=None): + # ({ + # "PUBLISHER": weekly['PUBLISHER'], + # "ISSUE": weekly['ISSUE'], + # "COMIC": weekly['COMIC'], + # "STATUS": tmp_status, + # "COMICID": weekly['ComicID'], + # "ISSUEID": weekly['IssueID'], + # "HAVEIT": haveit, + # "LINK": linkit, + # "AUTOWANT": False + # }) + issuelist = [] + logger.info('weeknumber: %s' % weeknumber) + logger.info('year: %s' % year) + weeklyresults = [] + if weeknumber is not None: + myDB = db.DBConnection() + w_results = myDB.select("SELECT * from weekly WHERE weeknumber=? AND year=?", [int(weeknumber),int(year)]) + watchlibrary = helpers.listLibrary() + issueLibrary = helpers.listIssues(weeknumber, year) + oneofflist = helpers.listoneoffs(weeknumber, year) + for weekly in w_results: + xfound = False + tmp_status = weekly['Status'] + issdate = None + if weekly['ComicID'] in watchlibrary: + haveit = watchlibrary[weekly['ComicID']] + + if all([mylar.AUTOWANT_UPCOMING, tmp_status == 'Skipped']): + tmp_status = 'Wanted' + + for x in issueLibrary: + if weekly['IssueID'] == x['IssueID']: + xfound = True + tmp_status = x['Status'] + issdate = x['IssueYear'] + break + + else: + xlist = [x['Status'] for x in oneofflist if x['IssueID'] == weekly['IssueID']] + if xlist: + haveit = 'OneOff' + tmp_status = xlist[0] + issdate = None + else: + haveit = "No" + + x = None + try: + x = float(weekly['ISSUE']) + except ValueError, e: + if 'au' in weekly['ISSUE'].lower() or 'ai' in weekly['ISSUE'].lower() or '.inh' in weekly['ISSUE'].lower() or '.now' in weekly['ISSUE'].lower() or '.mu' in weekly['ISSUE'].lower(): + x = weekly['ISSUE'] + + if x is not None: + weeklyresults.append({ + "PUBLISHER": weekly['PUBLISHER'], + "ISSUE": weekly['ISSUE'], + "COMIC": weekly['COMIC'], + "STATUS": tmp_status, + "COMICID": weekly['ComicID'], + "ISSUEID": weekly['IssueID'], + "HAVEIT": haveit, + "ISSUEDATE": issdate + }) + weeklylist = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False) + for ab in weeklylist: + if ab['HAVEIT'] == ab['COMICID']: + lb = myDB.selectone('SELECT ComicVersion, Type, ComicYear from comics WHERE ComicID=?', [ab['COMICID']]).fetchone() + issuelist.append({'IssueNumber': ab['ISSUE'], + 'ComicName': ab['COMIC'], + 'ComicID': ab['COMICID'], + 'IssueID': ab['ISSUEID'], + 'Status': ab['STATUS'], + 'Publisher': ab['PUBLISHER'], + 'ComicVolume': lb['ComicVersion'], + 'ComicYear': lb['ComicYear'], + 'ComicType': lb['Type'], + 'IssueYear': ab['ISSUEDATE']}) + + from mylar import cbl + ab = cbl.dict2xml(issuelist) + #a = cbl.CreateList(issuelist) + #ab = a.createComicRackReadlist() + logger.info('returned.') + logger.info(ab) + create_readlist.exposed = True diff --git a/mylar/weeklypull.py b/mylar/weeklypull.py index fe32a4ff..948f3b67 100755 --- a/mylar/weeklypull.py +++ b/mylar/weeklypull.py @@ -1366,151 +1366,150 @@ def future_check(): chkfuture = myDB.select("SELECT * FROM futureupcoming WHERE IssueNumber='1' OR IssueNumber='0'") #is not NULL") if chkfuture is None: logger.info("There are not any series on your future-list that I consider to be a NEW series") - return - - cflist = [] - #load in the values on an entry-by-entry basis into a tuple, so that we can query the sql clean again. 
- for cf in chkfuture: - cflist.append({"ComicName": cf['ComicName'], - "IssueDate": cf['IssueDate'], - "IssueNumber": cf['IssueNumber'], #this should be all #1's as the sql above limits the hits. - "Publisher": cf['Publisher'], - "Status": cf['Status']}) - logger.fdebug('cflist: ' + str(cflist)) - #now we load in - if len(cflist) == 0: - logger.info('No series have been marked as being on auto-watch.') - return - logger.info('I will be looking to see if any information has been released for ' + str(len(cflist)) + ' series that are NEW series') - #limit the search to just the 'current year' since if it's anything but a #1, it should have associated data already. - #limittheyear = [] - #limittheyear.append(cf['IssueDate'][-4:]) - search_results = [] - - for ser in cflist: - matched = False - theissdate = ser['IssueDate'][-4:] - if not theissdate.startswith('20'): - theissdate = ser['IssueDate'][:4] - logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') - searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=theissdate, explicit='all') - if len(searchresults) > 0: - if len(searchresults) > 1: - logger.info('More than one result returned - this may have to be a manual add, but I\'m going to try to figure it out myself first.') - matches = [] - logger.fdebug('Publisher of series to be added: ' + str(ser['Publisher'])) - for sr in searchresults: - logger.fdebug('Comparing ' + sr['name'] + ' - to - ' + ser['ComicName']) - tmpsername = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', ser['ComicName']).strip() - tmpsrname = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', sr['name']).strip() - tmpsername = re.sub('\-', '', tmpsername) - if tmpsername.lower().startswith('the '): - tmpsername = re.sub('the ', '', tmpsername.lower()).strip() - else: - tmpsername = re.sub(' the ', '', tmpsername.lower()).strip() - tmpsrname = re.sub('\-', '', tmpsrname) - if tmpsrname.lower().startswith('the '): - tmpsrname = re.sub('the ', '', tmpsrname.lower()).strip() - else: - tmpsrname = re.sub(' the ', '', tmpsrname.lower()).strip() - - tmpsername = re.sub(' and ', '', tmpsername.lower()).strip() - tmpsername = re.sub(' & ', '', tmpsername.lower()).strip() - tmpsrname = re.sub(' and ', '', tmpsrname.lower()).strip() - tmpsrname = re.sub(' & ', '', tmpsrname.lower()).strip() - - #append the cleaned-up name to get searched later against if necessary. - search_results.append({'name': tmpsrname, - 'comicid': sr['comicid']}) - - tmpsername = re.sub('\s', '', tmpsername).strip() - tmpsrname = re.sub('\s', '', tmpsrname).strip() - - logger.fdebug('Comparing modified names: ' + tmpsrname + ' - to - ' + tmpsername) - if tmpsername.lower() == tmpsrname.lower(): - logger.fdebug('Name matched successful: ' + sr['name']) - if str(sr['comicyear']) == str(theissdate): - logger.fdebug('Matched to : ' + str(theissdate)) - matches.append(sr) - - if len(matches) == 1: - logger.info('Narrowed down to one series as a direct match: ' + matches[0]['name'] + '[' + str(matches[0]['comicid']) + ']') - cid = matches[0]['comicid'] - matched = True - else: - logger.info('Unable to determine a successful match at this time (this is still a WIP so it will eventually work). 
Not going to attempt auto-adding at this time.') - catch_words = ('the', 'and', '&', 'to') - for pos_match in search_results: - logger.info(pos_match) - length_match = len(pos_match['name']) / len(ser['ComicName']) - logger.fdebug('length match differential set for an allowance of 20%') - logger.fdebug('actual differential in length between result and series title: ' + str((length_match * 100)-100) + '%') - if ((length_match * 100)-100) > 20: - logger.fdebug('there are too many extra words to consider this as match for the given title. Ignoring this result.') - continue - new_match = pos_match['name'].lower() - split_series = ser['ComicName'].lower().split() - for cw in catch_words: - for x in new_match.split(): - #logger.fdebug('comparing x: ' + str(x) + ' to cw: ' + str(cw)) - if x == cw: - new_match = re.sub(x, '', new_match) - - split_match = new_match.split() - word_match = 0 - i = 0 - for ss in split_series: - try: - matchword = split_match[i].lower() - except: - break - - if any([x == matchword for x in catch_words]): - #logger.fdebug('[MW] common word detected of : ' + matchword) - word_match+=.5 - elif any([cw == ss for cw in catch_words]): - #logger.fdebug('[CW] common word detected of : ' + matchword) - word_match+=.5 - else: - try: - #will return word position in string. - #logger.fdebug('word match to position found in both strings at position : ' + str(split_match.index(ss))) - if split_match.index(ss) == split_series.index(ss): - word_match+=1 - except ValueError: - break - i+=1 - logger.fdebug('word match score of : ' + str(word_match) + ' / ' + str(len(split_series))) - if word_match == len(split_series) or (word_match / len(split_series)) > 80: - logger.fdebug('[' + pos_match['name'] + '] considered a match - word matching percentage is greater than 80%. Attempting to auto-add series into watchlist.') - cid = pos_match['comicid'] - matched = True - - if matched: - #we should probably load all additional issues for the series on the futureupcoming list that are marked as Wanted and then - #throw them to the importer as a tuple, and once imported the import can run the additional search against them. - #now we scan for additional issues of the same series on the upcoming list and mark them accordingly. - chkthewanted = [] - chkwant = myDB.select("SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber != '1' AND Status='Wanted'", [ser['ComicName']]) - if chkwant is None: - logger.info('No extra issues to mark at this time for ' + ser['ComicName']) - else: - for chk in chkwant: - chkthewanted.append({"ComicName": chk['ComicName'], - "IssueDate": chk['IssueDate'], - "IssueNumber": chk['IssueNumber'], #this should be all #1's as the sql above limits the hits. - "Publisher": chk['Publisher'], - "Status": chk['Status']}) - - logger.info('Marking ' + str(len(chkthewanted)) + ' additional issues as Wanted from ' + ser['ComicName'] + ' series as requested.') - - future_check_add(cid, ser, chkthewanted, theissdate) - + else: + cflist = [] + #load in the values on an entry-by-entry basis into a tuple, so that we can query the sql clean again. + for cf in chkfuture: + cflist.append({"ComicName": cf['ComicName'], + "IssueDate": cf['IssueDate'], + "IssueNumber": cf['IssueNumber'], #this should be all #1's as the sql above limits the hits. 
+ "Publisher": cf['Publisher'], + "Status": cf['Status']}) + logger.fdebug('cflist: ' + str(cflist)) + #now we load in + if len(cflist) == 0: + logger.info('No series have been marked as being on auto-watch.') else: - logger.info('No series information available as of yet for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') - continue + logger.info('I will be looking to see if any information has been released for ' + str(len(cflist)) + ' series that are NEW series') + #limit the search to just the 'current year' since if it's anything but a #1, it should have associated data already. + #limittheyear = [] + #limittheyear.append(cf['IssueDate'][-4:]) + search_results = [] - logger.info('Finished attempting to auto-add new series.') + for ser in cflist: + matched = False + theissdate = ser['IssueDate'][-4:] + if not theissdate.startswith('20'): + theissdate = ser['IssueDate'][:4] + logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') + searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=theissdate, explicit='all') + if len(searchresults) > 0: + if len(searchresults) > 1: + logger.info('More than one result returned - this may have to be a manual add, but I\'m going to try to figure it out myself first.') + matches = [] + logger.fdebug('Publisher of series to be added: ' + str(ser['Publisher'])) + for sr in searchresults: + logger.fdebug('Comparing ' + sr['name'] + ' - to - ' + ser['ComicName']) + tmpsername = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', ser['ComicName']).strip() + tmpsrname = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', sr['name']).strip() + tmpsername = re.sub('\-', '', tmpsername) + if tmpsername.lower().startswith('the '): + tmpsername = re.sub('the ', '', tmpsername.lower()).strip() + else: + tmpsername = re.sub(' the ', '', tmpsername.lower()).strip() + tmpsrname = re.sub('\-', '', tmpsrname) + if tmpsrname.lower().startswith('the '): + tmpsrname = re.sub('the ', '', tmpsrname.lower()).strip() + else: + tmpsrname = re.sub(' the ', '', tmpsrname.lower()).strip() + + tmpsername = re.sub(' and ', '', tmpsername.lower()).strip() + tmpsername = re.sub(' & ', '', tmpsername.lower()).strip() + tmpsrname = re.sub(' and ', '', tmpsrname.lower()).strip() + tmpsrname = re.sub(' & ', '', tmpsrname.lower()).strip() + + #append the cleaned-up name to get searched later against if necessary. + search_results.append({'name': tmpsrname, + 'comicid': sr['comicid']}) + + tmpsername = re.sub('\s', '', tmpsername).strip() + tmpsrname = re.sub('\s', '', tmpsrname).strip() + + logger.fdebug('Comparing modified names: ' + tmpsrname + ' - to - ' + tmpsername) + if tmpsername.lower() == tmpsrname.lower(): + logger.fdebug('Name matched successful: ' + sr['name']) + if str(sr['comicyear']) == str(theissdate): + logger.fdebug('Matched to : ' + str(theissdate)) + matches.append(sr) + + if len(matches) == 1: + logger.info('Narrowed down to one series as a direct match: ' + matches[0]['name'] + '[' + str(matches[0]['comicid']) + ']') + cid = matches[0]['comicid'] + matched = True + else: + logger.info('Unable to determine a successful match at this time (this is still a WIP so it will eventually work). 
Not going to attempt auto-adding at this time.')
+ catch_words = ('the', 'and', '&', 'to')
+ for pos_match in search_results:
+ logger.info(pos_match)
+ length_match = float(len(pos_match['name'])) / len(ser['ComicName'])
+ logger.fdebug('length match differential set for an allowance of 20%')
+ logger.fdebug('actual differential in length between result and series title: ' + str((length_match * 100)-100) + '%')
+ if ((length_match * 100)-100) > 20:
+ logger.fdebug('there are too many extra words to consider this as match for the given title. Ignoring this result.')
+ continue
+ new_match = pos_match['name'].lower()
+ split_series = ser['ComicName'].lower().split()
+ for cw in catch_words:
+ for x in new_match.split():
+ #logger.fdebug('comparing x: ' + str(x) + ' to cw: ' + str(cw))
+ if x == cw:
+ new_match = re.sub(x, '', new_match)
+
+ split_match = new_match.split()
+ word_match = 0
+ i = 0
+ for ss in split_series:
+ try:
+ matchword = split_match[i].lower()
+ except:
+ break
+
+ if any([x == matchword for x in catch_words]):
+ #logger.fdebug('[MW] common word detected of : ' + matchword)
+ word_match+=.5
+ elif any([cw == ss for cw in catch_words]):
+ #logger.fdebug('[CW] common word detected of : ' + matchword)
+ word_match+=.5
+ else:
+ try:
+ #will return word position in string.
+ #logger.fdebug('word match to position found in both strings at position : ' + str(split_match.index(ss)))
+ if split_match.index(ss) == split_series.index(ss):
+ word_match+=1
+ except ValueError:
+ break
+ i+=1
+ logger.fdebug('word match score of : ' + str(word_match) + ' / ' + str(len(split_series)))
+ if word_match == len(split_series) or (word_match / float(len(split_series))) > .80:
+ logger.fdebug('[' + pos_match['name'] + '] considered a match - word matching percentage is greater than 80%. Attempting to auto-add series into watchlist.')
+ cid = pos_match['comicid']
+ matched = True
+
+ if matched:
+ #we should probably load all additional issues for the series on the futureupcoming list that are marked as Wanted and then
+ #throw them to the importer as a tuple, and once imported the import can run the additional search against them.
+ #now we scan for additional issues of the same series on the upcoming list and mark them accordingly.
+ chkthewanted = []
+ chkwant = myDB.select("SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber != '1' AND Status='Wanted'", [ser['ComicName']])
+ if chkwant is None:
+ logger.info('No extra issues to mark at this time for ' + ser['ComicName'])
+ else:
+ for chk in chkwant:
+ chkthewanted.append({"ComicName": chk['ComicName'],
+ "IssueDate": chk['IssueDate'],
+ "IssueNumber": chk['IssueNumber'], #this should be all #1's as the sql above limits the hits.
+ "Publisher": chk['Publisher'], + "Status": chk['Status']}) + + logger.info('Marking ' + str(len(chkthewanted)) + ' additional issues as Wanted from ' + ser['ComicName'] + ' series as requested.') + + future_check_add(cid, ser, chkthewanted, theissdate) + + else: + logger.info('No series information available as of yet for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') + continue + + logger.info('Finished attempting to auto-add new series.') return def future_check_add(comicid, serinfo, chkthewanted=None, theissdate=None): diff --git a/mylar/weeklypullit.py b/mylar/weeklypullit.py index 4766ff74..5bbd6563 100755 --- a/mylar/weeklypullit.py +++ b/mylar/weeklypullit.py @@ -17,9 +17,7 @@ from __future__ import with_statement import mylar -from mylar import logger - -#import threading +from mylar import logger, helpers, weeklypull class Weekly(): def __init__(self): @@ -27,6 +25,10 @@ class Weekly(): def run(self): logger.info('[WEEKLY] Checking Weekly Pull-list for new releases/updates') - mylar.weeklypull.pullit() - mylar.weeklypull.future_check() - return + helpers.job_management(write=True, job='Weekly Pullist', current_run=helpers.utctimestamp(), status='Running') + mylar.WEEKLY_STATUS = 'Running' + weeklypull.pullit() + weeklypull.future_check() + helpers.job_management(write=True, job='Weekly Pullist', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.WEEKLY_STATUS = 'Waiting' + diff --git a/post-processing/torrent-auto-snatch/getlftp.sh b/post-processing/torrent-auto-snatch/getlftp.sh index f60ffaaf..1fcafad8 100755 --- a/post-processing/torrent-auto-snatch/getlftp.sh +++ b/post-processing/torrent-auto-snatch/getlftp.sh @@ -25,12 +25,12 @@ fi source "$configfile" cd $LOCALCD -filename="$1" +filename="$downlocation" if [[ "${filename##*.}" == "cbr" || "${filename##*.}" == "cbz" ]]; then - LCMD="pget -n 6 '$1'" + LCMD="pget -n 6 '$filename'" else - LCMD="mirror -P 2 --use-pget-n=6 '$1'" + LCMD="mirror -P 2 --use-pget-n=6 '$filename'" fi if [[ -z $KEYFILE ]]; then